| file_name (string, 4-140 chars) | prefix (string, 0-39k chars) | suffix (string, 0-36.1k chars) | middle (string, 0-29.4k chars) | fim_type (4 classes) |
|---|---|---|---|---|
detect_outlier.py | import numpy as np
import sys
sys.path.append("../Pipeline/Audio/Pipeline/")
from AudioPipe.features import mfcc  # Feature Extraction Module, part of the shared preprocessing
import AudioPipe.speaker.recognition as SR  # Speaker Recognition Module
import scipy.io.wavfile as wav
import subprocess  # replaces the Python 2-only `commands` module
import os
import json
import argparse
import warnings
from scipy import stats

def outlier_detect(audio_dir, spk_name):
    spk_dir = os.path.join(audio_dir, spk_name)
    list_fn = os.path.join(spk_dir, "clip_list.txt")
    clip_ls = from_jsonfile(list_fn)
    audio_merge = merge_clips(spk_dir, clip_ls)
    # Train a model on the merged audio
    Model = SR.GMMRec()
    Model.enroll_file(spk_name, audio_merge)
    Model.train()
    # Score each utterance in the training set
    llhd_ls = []
    new_ls = []
    stat_fn = os.path.join(audio_dir, "stats.json")
    if os.path.exists(stat_fn) and os.path.getsize(stat_fn) > 0:
        stat_dict = from_jsonfile(stat_fn)
    else:
        stat_dict = {}
    if spk_name not in stat_dict:
        stat_dict[spk_name] = {}
    for clip in clip_ls:
        audio_test = os.path.join(spk_dir, clip["name"])
        #subprocess.getstatusoutput("ffmpeg -i "+audio_test+" -vn -f wav -ab 16k "+audio_test)
        try:
            llhd = Model.predict(Model.get_mfcc(audio_test))[1]
        except ValueError:
            print(clip["name"])
            continue
        llhd_ls.append(llhd)
        clip["llhd"] = llhd
        new_ls.append(clip)
    # Standardize the log-likelihoods; clips far below the mean are likely outliers.
    z_score = stats.zscore(llhd_ls)
    for i in range(len(llhd_ls)):
        new_ls[i]["zscore"] = z_score[i]
    with open(list_fn, "w") as fh:
        fh.write(to_json(new_ls, indent=2))
    stat_dict[spk_name]["clip_num"] = len(clip_ls)
    stat_dict[spk_name]["zpos_num"] = sum(z_score > 0)
    stat_dict[spk_name]["total_duration"] = sum([get_sec(clp["duration"]) for clp in new_ls])
    stat_dict[spk_name]["clean_duration"] = sum([get_sec(clp["duration"]) for clp in new_ls if clp["zscore"] > -0.00001])
    with open(stat_fn, "w") as fh:
        fh.write(to_json(stat_dict, indent=2))
    os.remove(audio_merge)
    return llhd_ls

def merge_clips(spk_dir, clip_ls):
    # Write the list of clips into a file for merging training data (first 101 clips)
    temp_fl = os.path.join(spk_dir, "temp.txt")
    count = 0
    with open(temp_fl, "w") as fh:
        for clip in clip_ls:
            if count > 100:
                break
            fh.write("file " + clip["name"] + "\n")
            count += 1
    # Merge all the listed clips into one audio file
    audio_merge = os.path.join(spk_dir, "merged_gross.wav")
    subprocess.getstatusoutput("ffmpeg -f concat -i " + temp_fl.replace(" ", "\\ ") + " -c copy -y " + audio_merge)
    os.remove(temp_fl)
    return audio_merge

def from_jsonfile(filename):
    with open(filename) as fh:
        return json.load(fh)

def get_sec(time_str):
    h, m, s = time_str.split(':')
    return int(h) * 3600 + int(m) * 60 + float(s)

def to_json(result, **kwargs):
    '''Return a JSON representation of the aligned transcript'''
    options = {
        'sort_keys': True,
        'indent': 4,
        'separators': (',', ': '),
    }
    options.update(kwargs)
    return json.dumps(result, **options)

parser = argparse.ArgumentParser(
    description='Detect outliers in a training dataset of one speaker.')
parser.add_argument(
    '-i', '--input', dest='input_dir', type=str,
    help='directory of audio clips')
parser.add_argument(
    '-s', '--spk', dest='spk_name', type=str,
    help='the name of the speaker')
args = parser.parse_args()
audio_dir = args.input_dir
spk_name = args.spk_name
with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    outlier_detect(audio_dir, spk_name)
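# Example invocation (hypothetical paths; adjust to your dataset layout):
#   python detect_outlier.py --input /data/audio --spk speaker_01
# Expects <input>/<spk>/clip_list.txt to hold a JSON list of clips with
# "name" and "duration" fields, as read by from_jsonfile() above.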
activity.js | (function () {
'use strict';
var module = angular.module('fim.base');
module.config(function($routeProvider) {
$routeProvider
.when('/activity/:engine/:section/:period', {
templateUrl: 'partials/activity.html',
controller: 'ActivityController'
});
});
module.controller('ActivityController', function($scope, $location, $routeParams, nxt, requests, $q, $sce,
ActivityProvider, BlocksProvider, ForgersProvider, StatisticsProvider, AllAssetsProvider, BlockStateProvider,
$timeout, dateParser, dateFilter, $rootScope) {
$rootScope.paramEngine = $routeParams.engine;
$scope.paramEngine = $routeParams.engine;
$scope.paramSection = $routeParams.section;
$scope.paramPeriod = $routeParams.period;
$scope.paramTimestamp = 0;
$scope.statistics = {};
$scope.blockstate = {};
$scope.breadcrumb = [];
$scope.filter = {};
if ($scope.paramEngine == 'nxt') { var api = nxt.nxt(); }
else if ($scope.paramEngine == 'fim') { var api = nxt.fim(); }
else {
$location.path('/activity/fim/activity/latest');
return;
}
if (['activity', 'blockchain', 'forgers', 'assets'].indexOf($scope.paramSection) == -1) {
$location.path('/activity/'+$scope.paramEngine+'/activity/latest');
return;
}
/* Breadcrumbs */
$scope.breadcrumb.push({
label: 'translate.home',
href: "#/home/"+$scope.paramEngine+"/activity/latest",
translate: true
});
$scope.breadcrumb.push({
label: 'translate.explorer',
href: '#/activity/fim/activity/latest',
translate: true
});
$scope.breadcrumb.push({
label: api.engine.symbol,
active: true
});
$scope.breadcrumb.push({
label: 'translate.'+$scope.paramSection,
translate: true
});
if (['activity','blockchain'].indexOf($scope.paramSection) != -1) {
if ($scope.paramPeriod == 'latest') {
$scope.breadcrumb.push({
label: 'translate.latest',
translate: true
});
}
else {
$scope.breadcrumb.push({
label: $scope.paramPeriod,
period: true
});
}
}
/* Date picker */
$scope.dt = null;
$scope.format = 'dd-MMMM-yyyy';
if ($scope.paramPeriod != 'latest') {
var d = dateParser.parse($scope.paramPeriod, $scope.format);
if (!d) {
$location.path('/activity/'+$scope.paramEngine+'/'+$scope.paramSection+'/latest');
return;
}
$scope.dt = $scope.paramPeriod;
/* Timestamp is for 00:00 hour on selected day */
d = new Date(d.getFullYear(), d.getMonth(), d.getDate()+1, 0, 0, 0);
$scope.paramTimestamp = nxt.util.convertToEpochTimestamp(d.getTime());
}
$scope.symbol = api.engine.symbol;
$scope.blockstate['TYPE_FIM'] = new BlockStateProvider(nxt.fim(), $scope);
$scope.blockstate['TYPE_FIM'].load();
if ($rootScope.enableDualEngines) {
$scope.blockstate['TYPE_NXT'] = new BlockStateProvider(nxt.nxt(), $scope);
$scope.blockstate['TYPE_NXT'].load();
}
switch ($scope.paramSection) {
case 'activity':
$scope.showFilter = true;
$scope.showTransactionFilter = true;
$scope.provider = new ActivityProvider(api, $scope, $scope.paramTimestamp, null, $scope.filter);
$scope.provider.reload();
break;
case 'blockchain':
$scope.showFilter = true;
$scope.provider = new BlocksProvider(api, $scope, $scope.paramTimestamp);
$scope.provider.reload();
break;
case 'forgers':
$scope.showFilter = false;
$scope.provider = new ForgersProvider(api, $scope);
$scope.provider.reload();
break;
case 'assets':
$scope.showFilter = false;
$scope.provider = new AllAssetsProvider(api, $scope, 10);
$scope.provider.reload();
break;
default:
throw new Error('Not reached');
}
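    // Each section maps to a dedicated provider; the provider pulls rows from the
    // selected engine's API and exposes them to the activity template.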
$scope.minDate = new Date(Date.UTC(2013, 10, 24, 12, 0, 0, 0));
$scope.maxDate = new Date();
$scope.dateOptions = {
formatYear: 'yy',
startingDay: 1
};
$scope.openDatePicker = function($event) {
$event.preventDefault();
$event.stopPropagation();
$scope.opened = true;
};
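    // Guard so the date-picker watcher below triggers navigation only once per pick.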
var stopWatching = false;
$scope.$watch('dt', function (newValue, oldValue) {
if (newValue && newValue !== oldValue && typeof oldValue != 'string' && !stopWatching) {
stopWatching = true;
var formatted = dateFilter(newValue, $scope.format);
$location.path('/activity/'+$scope.paramEngine+'/'+$scope.paramSection+'/'+formatted);
}
});
if ($scope.showTransactionFilter) {
$scope.filter.all = true;
$scope.filter.payments = true;
$scope.filter.messages = true;
$scope.filter.aliases = true;
$scope.filter.namespacedAliases = true;
$scope.filter.polls = true;
$scope.filter.accountInfo = true;
$scope.filter.announceHub = true;
$scope.filter.goodsStore = true;
$scope.filter.balanceLeasing = true;
$scope.filter.trades = true;
$scope.filter.assetIssued = true;
$scope.filter.assetTransfer = true;
$scope.filter.assetOrder = true;
$scope.filter.currencyIssued = true;
$scope.filter.currencyTransfer = true;
$scope.filter.currencyOther = true;
$scope.filterAllChanged = function () {
$scope.$evalAsync(function () {
var on = $scope.filter.all;
$scope.filter.payments = on;
$scope.filter.messages = on;
$scope.filter.aliases = on;
$scope.filter.namespacedAliases = on;
$scope.filter.polls = on;
$scope.filter.accountInfo = on;
$scope.filter.announceHub = on;
$scope.filter.goodsStore = on;
$scope.filter.balanceLeasing = on;
$scope.filter.trades = on;
$scope.filter.assetIssued = on;
$scope.filter.assetTransfer = on;
$scope.filter.assetOrder = on;
$scope.filter.currencyIssued = on;
$scope.filter.currencyTransfer = on;
$scope.filter.currencyOther = on;
$scope.filterChanged();
});
}
$scope.filterChanged = function () {
$scope.provider.applyFilter($scope.filter);
}
}
$scope.loadStatistics = function (engine, collapse_var) {
$scope[collapse_var] = !$scope[collapse_var];
if (!$scope[collapse_var]) {
if (!$scope.statistics[engine]) {
var api = nxt.get(engine);
$scope.statistics[engine] = new StatisticsProvider(api, $scope);
}
$scope.statistics[engine].load();
}
}
});
})();
set_watch_mode.rs | use std::io;
use super::super::{WriteTo, WatchMode, WatchModeReader, WriteResult, Reader, ReaderStatus, MessageInner, Message};
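/// Message carrying the requested watch mode. Its reader decodes the mode
/// incrementally, so decoding can resume after a partial read (see `resume`).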
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct SetWatchMode {
mode: WatchMode,
}
#[derive(Debug)]
pub struct SetWatchModeReader {
inner: WatchModeReader,
}
impl SetWatchMode {
pub fn new(mode: WatchMode) -> Self {
SetWatchMode { mode }
}
pub fn mode(&self) -> WatchMode {
self.mode
}
pub fn reader() -> SetWatchModeReader {
SetWatchModeReader { inner: WatchMode::reader() }
}
}
impl MessageInner for SetWatchMode {
#[inline]
fn wrap(self) -> Message {
Message::SetWatchMode(self)
}
}
impl Reader<SetWatchMode> for SetWatchModeReader {
fn resume<R>(&mut self, input: &mut R) -> io::Result<ReaderStatus<SetWatchMode>> where R: io::Read {
let status = self.inner.resume(input)?;
Ok(status.map(|mode| SetWatchMode::new(mode)))
}
fn rewind(&mut self) {
self.inner.rewind();
}
}
impl WriteTo for SetWatchMode {
fn write_to<W: io::Write>(&self, target: &mut W) -> WriteResult {
self.mode.write_to(target)
}
}
#[cfg(test)]
mod test {
use super::*;
use super::super::super::{MessageType, WatchMode};
#[test]
fn test_reader_with_tagged() {
let input = vec![
/* type */ MessageType::SetWatchMode.into(),
/* mode = tagged */ 2,
/* tag */ 0, 0, 0, 0, 0, 0, 255, 255
];
test_reader! {
Message::reader(),
input,
ReaderStatus::Pending,
ReaderStatus::Pending,
ReaderStatus::Pending,
ReaderStatus::Complete(Message::SetWatchMode(SetWatchMode::new(WatchMode::Tagged(65535))))
};
}
#[test]
fn test_reader() {
let input = vec![
/* type */ MessageType::SetWatchMode.into(),
/* mode = all */ 1
];
test_reader! {
Message::reader(),
input,
ReaderStatus::Pending,
ReaderStatus::Pending,
ReaderStatus::Complete(Message::SetWatchMode(SetWatchMode::new(WatchMode::All)))
};
}
}
SchedulerProfiling.js | /**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {PriorityLevel} from './SchedulerPriorities';
import {enableProfiling} from './SchedulerFeatureFlags';
import {NoPriority} from './SchedulerPriorities';
let runIdCounter: number = 0;
let mainThreadIdCounter: number = 0;
const profilingStateSize = 4;
export const sharedProfilingBuffer = enableProfiling
? // $FlowFixMe Flow doesn't know about SharedArrayBuffer
typeof SharedArrayBuffer === 'function'
? new SharedArrayBuffer(profilingStateSize * Int32Array.BYTES_PER_ELEMENT)
: // $FlowFixMe Flow doesn't know about ArrayBuffer
typeof ArrayBuffer === 'function'
? new ArrayBuffer(profilingStateSize * Int32Array.BYTES_PER_ELEMENT)
: null // Don't crash the init path on IE9
: null;
const profilingState =
enableProfiling && sharedProfilingBuffer !== null
? new Int32Array(sharedProfilingBuffer)
: []; // We can't read this but it helps save bytes for null checks
const PRIORITY = 0;
const CURRENT_TASK_ID = 1;
const CURRENT_RUN_ID = 2;
const QUEUE_SIZE = 3;
if (enableProfiling) {
profilingState[PRIORITY] = NoPriority;
// This is maintained with a counter, because the size of the priority queue
// array might include canceled tasks.
profilingState[QUEUE_SIZE] = 0;
profilingState[CURRENT_TASK_ID] = 0;
}
// Bytes per element is 4
const INITIAL_EVENT_LOG_SIZE = 131072;
const MAX_EVENT_LOG_SIZE = 524288; // Equivalent to 2 megabytes
let eventLogSize = 0;
let eventLogBuffer = null;
let eventLog = null;
let eventLogIndex = 0;
const TaskStartEvent = 1;
const TaskCompleteEvent = 2;
const TaskErrorEvent = 3;
const TaskCancelEvent = 4;
const TaskRunEvent = 5;
const TaskYieldEvent = 6;
const SchedulerSuspendEvent = 7;
const SchedulerResumeEvent = 8;
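// Each logged event is a flat run of int32s: [eventType, timestamp, ...payload].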
function logEvent(entries) {
if (eventLog !== null) {
const offset = eventLogIndex;
eventLogIndex += entries.length;
if (eventLogIndex + 1 > eventLogSize) {
eventLogSize *= 2;
if (eventLogSize > MAX_EVENT_LOG_SIZE) {
// Using console['error'] to evade Babel and ESLint
console['error'](
"Scheduler Profiling: Event log exceeded maximum size. Don't " +
'forget to call `stopLoggingProfilingEvents()`.',
);
stopLoggingProfilingEvents();
return;
}
const newEventLog = new Int32Array(eventLogSize * 4);
newEventLog.set(eventLog);
eventLogBuffer = newEventLog.buffer;
eventLog = newEventLog;
}
eventLog.set(entries, offset);
}
}
export function startLoggingProfilingEvents(): void {
eventLogSize = INITIAL_EVENT_LOG_SIZE;
eventLogBuffer = new ArrayBuffer(eventLogSize * 4);
eventLog = new Int32Array(eventLogBuffer);
eventLogIndex = 0;
}
export function stopLoggingProfilingEvents(): ArrayBuffer | null {
const buffer = eventLogBuffer;
eventLogSize = 0;
eventLogBuffer = null;
eventLog = null;
eventLogIndex = 0;
return buffer;
}
export function markTaskStart(
task: {
id: number,
priorityLevel: PriorityLevel,
...
},
ms: number,
) {
if (enableProfiling) {
profilingState[QUEUE_SIZE]++;
if (eventLog !== null) {
// performance.now returns a float, representing milliseconds. When the
// event is logged, it's coerced to an int. Convert to microseconds to
// maintain extra degrees of precision.
logEvent([TaskStartEvent, ms * 1000, task.id, task.priorityLevel]);
}
}
}
export function markTaskCompleted(
task: {
id: number,
priorityLevel: PriorityLevel,
...
},
ms: number,
) {
if (enableProfiling) {
profilingState[PRIORITY] = NoPriority;
profilingState[CURRENT_TASK_ID] = 0;
profilingState[QUEUE_SIZE]--;
if (eventLog !== null) {
logEvent([TaskCompleteEvent, ms * 1000, task.id]);
}
}
}
export function markTaskCanceled(
task: {
id: number,
priorityLevel: PriorityLevel,
...
},
ms: number,
) {
if (enableProfiling) {
profilingState[QUEUE_SIZE]--;
if (eventLog !== null) {
logEvent([TaskCancelEvent, ms * 1000, task.id]);
}
}
}
export function markTaskErrored(
task: {
id: number,
priorityLevel: PriorityLevel,
...
},
ms: number,
) {
if (enableProfiling) {
profilingState[PRIORITY] = NoPriority;
profilingState[CURRENT_TASK_ID] = 0;
profilingState[QUEUE_SIZE]--;
if (eventLog !== null) {
logEvent([TaskErrorEvent, ms * 1000, task.id]);
}
}
}
export function markTaskRun(
task: {
id: number,
priorityLevel: PriorityLevel,
...
},
ms: number,
) {
if (enableProfiling) {
runIdCounter++;
profilingState[PRIORITY] = task.priorityLevel;
profilingState[CURRENT_TASK_ID] = task.id;
profilingState[CURRENT_RUN_ID] = runIdCounter;
if (eventLog !== null) {
logEvent([TaskRunEvent, ms * 1000, task.id, runIdCounter]);
}
}
}
export function markTaskYield(task: {id: number, ...}, ms: number) {
if (enableProfiling) {
profilingState[PRIORITY] = NoPriority;
profilingState[CURRENT_TASK_ID] = 0;
profilingState[CURRENT_RUN_ID] = 0;
if (eventLog !== null) {
logEvent([TaskYieldEvent, ms * 1000, task.id, runIdCounter]);
}
}
}
export function markSchedulerSuspended(ms: number) {
if (enableProfiling) {
mainThreadIdCounter++;
if (eventLog !== null) {
logEvent([SchedulerSuspendEvent, ms * 1000, mainThreadIdCounter]);
}
}
}
export function markSchedulerUnsuspended(ms: number) {
if (enableProfiling) {
if (eventLog !== null) {
logEvent([SchedulerResumeEvent, ms * 1000, mainThreadIdCounter]);
}
}
}
opportunity_kraken.py | from exchanges import helpers
from exchanges import kraken
from decimal import Decimal

### Kraken opportunities

#### ARBITRAGE OPPORTUNITY 1: LTC -> EUR -> XBT -> LTC
def opportunity_1():
    sellLTCbuyEUR = kraken.get_current_bid_LTCEUR()
    sellEURbuyXBT = kraken.get_current_ask_XBTEUR()
    sellXBTbuyLTC = kraken.get_current_ask_XBTLTC()
    opport = 1 - ((sellLTCbuyEUR / sellEURbuyXBT) * sellXBTbuyLTC)
    return Decimal(opport)

#### ARBITRAGE OPPORTUNITY 2: EUR -> LTC -> XBT -> EUR
def opportunity_2():
    sellEURbuyLTC = kraken.get_current_ask_LTCEUR()
    sellLTCbuyXBT = kraken.get_current_ask_XBTLTC()
    sellXBTbuyEUR = kraken.get_current_bid_XBTEUR()
    opport = 1 - (((1 / sellEURbuyLTC) / sellLTCbuyXBT) * sellXBTbuyEUR)
    return Decimal(opport)
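# Both functions measure a triangular-arbitrage gap: a nonzero result means the
# quoted prices do not multiply back to 1 around the cycle, i.e. a potential
# edge before fees. The kraken.* price helpers are assumed to return current
# top-of-book quotes as numbers Decimal() can consume.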
lib.fluidContent.ts | # Default configuration for content elements which are using FLUIDTEMPLATE directly
lib.fluidContent >
lib.fluidContent = FLUIDTEMPLATE
lib.fluidContent {
    templateName = Default
    templateRootPaths {
        0 = EXT:fluid_styled_content/Resources/Private/Templates/
        10 = {$styles.templates.templateRootPath}
    }
    partialRootPaths {
        0 = EXT:fluid_styled_content/Resources/Private/Partials/
        10 = {$styles.templates.partialRootPath}
    }
    layoutRootPaths {
        0 = EXT:fluid_styled_content/Resources/Private/Layouts/
        10 = {$styles.templates.layoutRootPath}
    }
    settings {
        defaultHeaderType = {$styles.content.defaultHeaderType}
        media {
            popup {
                bodyTag = <body style="margin:0; background:#fff;">
                wrap = <a href="javascript:close();"> | </a>
                width = {$styles.content.textmedia.linkWrap.width}
                height = {$styles.content.textmedia.linkWrap.height}
                JSwindow = 1
                JSwindow {
                    newWindow = {$styles.content.textmedia.linkWrap.newWindow}
                    if.isFalse = {$styles.content.textmedia.linkWrap.lightboxEnabled}
                }
                directImageLink = {$styles.content.textmedia.linkWrap.lightboxEnabled}
                linkParams.ATagParams.dataWrap = class="{$styles.content.textmedia.linkWrap.lightboxCssClass}" rel="{$styles.content.textmedia.linkWrap.lightboxRelAttribute}"
            }
        }
    }
}
dev-server.ts | import express, { Router } from 'express';
import { Builder, logConfig, Options } from '@storybook/core-common';
import { getMiddleware } from './utils/middleware';
import { getServerAddresses } from './utils/server-address';
import { getServer } from './utils/server-init';
import { useStatics } from './utils/server-statics';
import * as managerBuilder from './manager/builder';
import { openInBrowser } from './utils/open-in-browser';
import { getPreviewBuilder } from './utils/get-preview-builder';
// @ts-ignore
export const router: Router = new Router();
export async function storybookDevServer(options: Options) {
const startTime = process.hrtime();
const app = express();
const server = await getServer(app, options);
if (typeof options.extendServer === 'function') {
options.extendServer(server);
}
app.use((req, res, next) => {
res.header('Access-Control-Allow-Origin', '*');
res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept');
next();
});
// User's own static files
await useStatics(router, options);
getMiddleware(options.configDir)(router);
app.use(router);
const { port, host } = options;
const proto = options.https ? 'https' : 'http';
const { address, networkAddress } = getServerAddresses(port, host, proto);
await new Promise<void>((resolve, reject) => {
// FIXME: Following line doesn't match TypeScript signature at all 🤔
// @ts-ignore
server.listen({ port, host }, (error: Error) => (error ? reject(error) : resolve()));
});
const previewBuilder: Builder<unknown, unknown> = await getPreviewBuilder(options.configDir);
if (options.debugWebpack) {
logConfig('Preview webpack config', await previewBuilder.getConfig(options));
logConfig('Manager webpack config', await managerBuilder.getConfig(options));
}
const preview = options.ignorePreview
? Promise.resolve()
: previewBuilder.start({
startTime,
options,
router,
});
const manager = managerBuilder.start({
startTime,
options,
router,
});
const [previewResult, managerResult] = await Promise.all([
preview,
manager
// TODO #13083 Restore this when compiling the preview is fast enough
// .then((result) => {
// if (!options.ci && !options.smokeTest) openInBrowser(address);
// return result;
// })
.catch(previewBuilder.bail),
]);
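// Manager and preview builds run concurrently; startup time is bounded by the
// slower of the two.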
// TODO #13083 Remove this when compiling the preview is fast enough
if (!options.ci && !options.smokeTest) openInBrowser(host ? networkAddress : address);
return { previewResult, managerResult, address, networkAddress };
}
005_tle_error.py | class Solution(object):
def longestPalindrome(self, s):
max_len = 0
max_str = ''
if len(s) <= 2:
return s
for i, ch in enumerate(s):
delta = 1
count = 0
# center is ch
while (i - delta) >= 0 and (i + delta) < len(s):
if s[i-delta] != s[i+delta]:
break
count += 1
delta += 1
if count * 2 + 1 > max_len:
max_len = count * 2 + 1 | # center is between ch and the next char (even-length case)
delta = 0.5
count = 0
j = i + 0.5
while (j - delta) >= 0 and (j + delta) < len(s):
if s[int(j - delta)] != s[int(j + delta)]:
break
count += 1
delta += 1
if count * 2 > max_len:
max_len = count * 2
max_str = s[i-count+1:i+count+1]
return max_str
def test(self):
assert self.longestPalindrome('a') == 'a'
assert self.longestPalindrome('abcba') == 'abcba'
assert self.longestPalindrome('eabcbae') == 'eabcbae'
assert self.longestPalindrome('abba') == 'abba'
assert self.longestPalindrome('abbc') == 'bb'
assert self.longestPalindrome('dbabba') == 'abba'
assert self.longestPalindrome('decababace') == 'ecababace'
assert self.longestPalindrome('decababaceehgagbgnag') == 'ecababace'
if __name__ == '__main__':
s = Solution()
s.test() | max_str = s[i-count:i+1+count]
| random_line_split |
005_tle_error.py | class Solution(object):
def longestPalindrome(self, s):
max_len = 0
max_str = ''
if len(s) <= 2:
return s
for i, ch in enumerate(s):
delta = 1
count = 0
# center is ch
while (i - delta) >= 0 and (i + delta) < len(s):
if s[i-delta] != s[i+delta]:
break
count += 1
delta += 1
if count * 2 + 1 > max_len:
max_len = count * 2 + 1
max_str = s[i-count:i+1+count]
# center is between ch and the next char (even-length case)
delta = 0.5
count = 0
j = i + 0.5
while (j - delta) >= 0 and (j + delta) < len(s):
if s[int(j - delta)] != s[int(j + delta)]:
break
count += 1
delta += 1
if count * 2 > max_len:
max_len = count * 2
max_str = s[i-count+1:i+count+1]
return max_str
def test(self):
assert self.longestPalindrome('a') == 'a'
assert self.longestPalindrome('abcba') == 'abcba'
assert self.longestPalindrome('eabcbae') == 'eabcbae'
assert self.longestPalindrome('abba') == 'abba'
assert self.longestPalindrome('abbc') == 'bb'
assert self.longestPalindrome('dbabba') == 'abba'
assert self.longestPalindrome('decababace') == 'ecababace'
assert self.longestPalindrome('decababaceehgagbgnag') == 'ecababace'
if __name__ == '__main__':
| s = Solution()
s.test() | conditional_block |
|
005_tle_error.py | class Solution(object):
def longestPalindrome(self, s):
max_len = 0
max_str = ''
if len(s) <= 2:
return s
for i, ch in enumerate(s):
delta = 1
count = 0
# center is ch
while (i - delta) >= 0 and (i + delta) < len(s):
if s[i-delta] != s[i+delta]:
break
count += 1
delta += 1
if count * 2 + 1 > max_len:
max_len = count * 2 + 1
max_str = s[i-count:i+1+count]
# center is between ch and the next char (even-length case)
delta = 0.5
count = 0
j = i + 0.5
while (j - delta) >= 0 and (j + delta) < len(s):
if s[int(j - delta)] != s[int(j + delta)]:
break
count += 1
delta += 1
if count * 2 > max_len:
max_len = count * 2
max_str = s[i-count+1:i+count+1]
return max_str
def | (self):
assert self.longestPalindrome('a') == 'a'
assert self.longestPalindrome('abcba') == 'abcba'
assert self.longestPalindrome('eabcbae') == 'eabcbae'
assert self.longestPalindrome('abba') == 'abba'
assert self.longestPalindrome('abbc') == 'bb'
assert self.longestPalindrome('dbabba') == 'abba'
assert self.longestPalindrome('decababace') == 'ecababace'
assert self.longestPalindrome('decababaceehgagbgnag') == 'ecababace'
if __name__ == '__main__':
s = Solution()
s.test()
| test | identifier_name |
005_tle_error.py | class Solution(object):
def longestPalindrome(self, s):
max_len = 0
max_str = ''
if len(s) <= 2:
return s
for i, ch in enumerate(s):
delta = 1
count = 0
# center is ch
while (i - delta) >= 0 and (i + delta) < len(s):
if s[i-delta] != s[i+delta]:
break
count += 1
delta += 1
if count * 2 + 1 > max_len:
max_len = count * 2 + 1
max_str = s[i-count:i+1+count]
# center is between ch and the next char (even-length case)
delta = 0.5
count = 0
j = i + 0.5
while (j - delta) >= 0 and (j + delta) < len(s):
if s[int(j - delta)] != s[int(j + delta)]:
break
count += 1
delta += 1
if count * 2 > max_len:
max_len = count * 2
max_str = s[i-count+1:i+count+1]
return max_str
def test(self):
|
if __name__ == '__main__':
s = Solution()
s.test()
| assert self.longestPalindrome('a') == 'a'
assert self.longestPalindrome('abcba') == 'abcba'
assert self.longestPalindrome('eabcbae') == 'eabcbae'
assert self.longestPalindrome('abba') == 'abba'
assert self.longestPalindrome('abbc') == 'bb'
assert self.longestPalindrome('dbabba') == 'abba'
assert self.longestPalindrome('decababace') == 'ecababace'
assert self.longestPalindrome('decababaceehgagbgnag') == 'ecababace' | identifier_body |
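# --- Editor's illustrative sketch (not part of the dataset rows above) ---
# The solution above expands around float "half indices" to handle
# even-length centers. The same O(n^2) expand-around-center idea is more
# commonly written with two integer pointers:
def longest_palindrome(s):
    if len(s) < 2:
        return s
    best = s[0]

    def expand(lo, hi):
        # widen while the window [lo, hi] remains a palindrome
        while lo >= 0 and hi < len(s) and s[lo] == s[hi]:
            lo -= 1
            hi += 1
        return s[lo + 1:hi]

    for i in range(len(s)):
        for candidate in (expand(i, i), expand(i, i + 1)):
            if len(candidate) > len(best):
                best = candidate
    return best

assert longest_palindrome('dbabba') == 'abba'
assert longest_palindrome('decababace') == 'ecababace'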
settings.py | """
Django settings for paulpruitt_net project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from secrets import SECRET_KEY, DB_USER, DB_PASSWORD
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'taggit',
'pblog'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'paulpruitt_net.urls'
WSGI_APPLICATION = 'paulpruitt_net.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE' : 'django.db.backends.postgresql_psycopg2',
'NAME' : 'site',
'USER' : DB_USER,
'PASSWORD': DB_PASSWORD,
'HOST' : '127.0.0.1',
'PORT' : '',
} | # Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = '/srv/www/site/static'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
# Do not allow IFrames
X_FRAME_OPTIONS = 'DENY' | }
| random_line_split |
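# --- Editor's sketch (assumption, not part of the dataset row above) ---
# settings.py does `from secrets import SECRET_KEY, DB_USER, DB_PASSWORD`,
# so a minimal companion secrets.py -- kept out of version control -- would
# look like this; the values are placeholders:
SECRET_KEY = 'replace-with-a-long-random-string'
DB_USER = 'site_user'
DB_PASSWORD = 'replace-me'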
violations.py | """Docstring violation definition."""
from collections import namedtuple
from functools import partial
from itertools import dropwhile
from typing import Any, Callable, Iterable, List, Optional
from .parser import Definition
from .utils import is_blank
__all__ = ('Error', 'ErrorRegistry', 'conventions')
ErrorParams = namedtuple('ErrorParams', ['code', 'short_desc', 'context'])
class Error:
"""Error in docstring style."""
# Options that define how errors are printed:
explain = False
source = False
def __init__(
self,
code: str,
short_desc: str,
context: str,
*parameters: Iterable[str],
) -> None:
"""Initialize the object.
`parameters` are specific to the created error.
"""
self.code = code
self.short_desc = short_desc
self.context = context
self.parameters = parameters
self.definition = None # type: Optional[Definition]
self.explanation = None # type: Optional[str]
def set_context(self, definition: Definition, explanation: str) -> None:
"""Set the source code context for this error."""
self.definition = definition
self.explanation = explanation
filename = property(lambda self: self.definition.module.name)
line = property(lambda self: self.definition.error_lineno)
@property
def message(self) -> str:
"""Return the message to print to the user."""
ret = f'{self.code}: {self.short_desc}'
if self.context is not None:
specific_error_msg = self.context.format(*self.parameters)
ret += f' ({specific_error_msg})'
return ret
@property
def lines(self) -> str:
"""Return the source code lines for this error."""
if self.definition is None:
return ''
source = ''
lines = self.definition.source.splitlines(keepends=True)
offset = self.definition.start # type: ignore
lines_stripped = list(
reversed(list(dropwhile(is_blank, reversed(lines))))
)
numbers_width = len(str(offset + len(lines_stripped)))
line_format = f'{{:{numbers_width}}}:{{}}'
for n, line in enumerate(lines_stripped):
if line:
line = ' ' + line
source += line_format.format(n + offset, line)
if n > 5:
source += ' ...\n'
break
return source
def __str__(self) -> str:
if self.explanation:
self.explanation = '\n'.join(
l for l in self.explanation.split('\n') if not is_blank(l)
)
template = '{filename}:{line} {definition}:\n {message}'
if self.source and self.explain:
template += '\n\n{explanation}\n\n{lines}\n'
elif self.source and not self.explain:
template += '\n\n{lines}\n'
elif self.explain and not self.source:
template += '\n\n{explanation}\n\n'
return template.format(
**{
name: getattr(self, name)
for name in [
'filename',
'line',
'definition',
'message',
'explanation',
'lines',
]
}
)
def __repr__(self) -> str:
return str(self)
def __lt__(self, other: 'Error') -> bool:
return (self.filename, self.line) < (other.filename, other.line)
class ErrorRegistry:
"""A registry of all error codes, divided to groups."""
groups = [] # type: ignore
class ErrorGroup:
"""A group of similarly themed errors."""
def __init__(self, prefix: str, name: str) -> None:
"""Initialize the object.
`Prefix` should be the common prefix for errors in this group,
e.g., "D1".
`name` is the name of the group (its subject).
"""
self.prefix = prefix
self.name = name
self.errors = [] # type: List[ErrorParams]
def create_error(
self,
error_code: str,
error_desc: str,
error_context: Optional[str] = None,
) -> Callable[[Iterable[str]], Error]:
"""Create an error, register it to this group and return it."""
# TODO: check prefix
error_params = ErrorParams(error_code, error_desc, error_context)
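# partial() pre-binds code/short_desc/context; callers of the returned
# factory pass only the per-occurrence parameters (see Error.__init__).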
factory = partial(Error, *error_params)
self.errors.append(error_params)
return factory
@classmethod
def create_group(cls, prefix: str, name: str) -> ErrorGroup:
"""Create a new error group and return it."""
group = cls.ErrorGroup(prefix, name)
cls.groups.append(group)
return group
@classmethod
def get_error_codes(cls) -> Iterable[str]:
|
@classmethod
def to_rst(cls) -> str:
"""Output the registry as reStructuredText, for documentation."""
max_len = max(
len(error.short_desc)
for group in cls.groups
for error in group.errors
)
sep_line = '+' + 6 * '-' + '+' + '-' * (max_len + 2) + '+\n'
blank_line = '|' + (max_len + 9) * ' ' + '|\n'
table = ''
for group in cls.groups:
table += sep_line
table += blank_line
table += '|' + f'**{group.name}**'.center(max_len + 9) + '|\n'
table += blank_line
for error in group.errors:
table += sep_line
table += (
'|'
+ error.code.center(6)
+ '| '
+ error.short_desc.ljust(max_len + 1)
+ '|\n'
)
table += sep_line
return table
D1xx = ErrorRegistry.create_group('D1', 'Missing Docstrings')
D100 = D1xx.create_error(
'D100',
'Missing docstring in public module',
)
D101 = D1xx.create_error(
'D101',
'Missing docstring in public class',
)
D102 = D1xx.create_error(
'D102',
'Missing docstring in public method',
)
D103 = D1xx.create_error(
'D103',
'Missing docstring in public function',
)
D104 = D1xx.create_error(
'D104',
'Missing docstring in public package',
)
D105 = D1xx.create_error(
'D105',
'Missing docstring in magic method',
)
D106 = D1xx.create_error(
'D106',
'Missing docstring in public nested class',
)
D107 = D1xx.create_error(
'D107',
'Missing docstring in __init__',
)
D2xx = ErrorRegistry.create_group('D2', 'Whitespace Issues')
D200 = D2xx.create_error(
'D200',
'One-line docstring should fit on one line ' 'with quotes',
'found {0}',
)
D201 = D2xx.create_error(
'D201',
'No blank lines allowed before function docstring',
'found {0}',
)
D202 = D2xx.create_error(
'D202',
'No blank lines allowed after function docstring',
'found {0}',
)
D203 = D2xx.create_error(
'D203',
'1 blank line required before class docstring',
'found {0}',
)
D204 = D2xx.create_error(
'D204',
'1 blank line required after class docstring',
'found {0}',
)
D205 = D2xx.create_error(
'D205',
'1 blank line required between summary line and description',
'found {0}',
)
D206 = D2xx.create_error(
'D206',
'Docstring should be indented with spaces, not tabs',
)
D207 = D2xx.create_error(
'D207',
'Docstring is under-indented',
)
D208 = D2xx.create_error(
'D208',
'Docstring is over-indented',
)
D209 = D2xx.create_error(
'D209',
'Multi-line docstring closing quotes should be on a separate line',
)
D210 = D2xx.create_error(
'D210',
'No whitespaces allowed surrounding docstring text',
)
D211 = D2xx.create_error(
'D211',
'No blank lines allowed before class docstring',
'found {0}',
)
D212 = D2xx.create_error(
'D212',
'Multi-line docstring summary should start at the first line',
)
D213 = D2xx.create_error(
'D213',
'Multi-line docstring summary should start at the second line',
)
D214 = D2xx.create_error(
'D214',
'Section is over-indented',
'{0!r}',
)
D215 = D2xx.create_error(
'D215',
'Section underline is over-indented',
'in section {0!r}',
)
D3xx = ErrorRegistry.create_group('D3', 'Quotes Issues')
D300 = D3xx.create_error(
'D300',
'Use """triple double quotes"""',
'found {0}-quotes',
)
D301 = D3xx.create_error(
'D301',
'Use r""" if any backslashes in a docstring',
)
D302 = D3xx.create_error(
'D302',
'Deprecated: Use u""" for Unicode docstrings',
)
D4xx = ErrorRegistry.create_group('D4', 'Docstring Content Issues')
D400 = D4xx.create_error(
'D400',
'First line should end with a period',
'not {0!r}',
)
D401 = D4xx.create_error(
'D401',
'First line should be in imperative mood',
"perhaps '{0}', not '{1}'",
)
D401b = D4xx.create_error(
'D401',
'First line should be in imperative mood; try rephrasing',
"found '{0}'",
)
D402 = D4xx.create_error(
'D402',
'First line should not be the function\'s "signature"',
)
D403 = D4xx.create_error(
'D403',
'First word of the first line should be properly capitalized',
'{0!r}, not {1!r}',
)
D404 = D4xx.create_error(
'D404',
'First word of the docstring should not be `This`',
)
D405 = D4xx.create_error(
'D405',
'Section name should be properly capitalized',
'{0!r}, not {1!r}',
)
D406 = D4xx.create_error(
'D406',
'Section name should end with a newline',
'{0!r}, not {1!r}',
)
D407 = D4xx.create_error(
'D407',
'Missing dashed underline after section',
'{0!r}',
)
D408 = D4xx.create_error(
'D408',
'Section underline should be in the line following the section\'s name',
'{0!r}',
)
D409 = D4xx.create_error(
'D409',
'Section underline should match the length of its name',
'Expected {0!r} dashes in section {1!r}, got {2!r}',
)
D410 = D4xx.create_error(
'D410',
'Missing blank line after section',
'{0!r}',
)
D411 = D4xx.create_error(
'D411',
'Missing blank line before section',
'{0!r}',
)
D412 = D4xx.create_error(
'D412',
'No blank lines allowed between a section header and its content',
'{0!r}',
)
D413 = D4xx.create_error(
'D413',
'Missing blank line after last section',
'{0!r}',
)
D414 = D4xx.create_error(
'D414',
'Section has no content',
'{0!r}',
)
D415 = D4xx.create_error(
'D415',
(
'First line should end with a period, question '
'mark, or exclamation point'
),
'not {0!r}',
)
D416 = D4xx.create_error(
'D416',
'Section name should end with a colon',
'{0!r}, not {1!r}',
)
D417 = D4xx.create_error(
'D417',
'Missing argument descriptions in the docstring',
'argument(s) {0} are missing descriptions in {1!r} docstring',
)
D418 = D4xx.create_error(
'D418',
'Function/ Method decorated with @overload shouldn\'t contain a docstring',
)
class AttrDict(dict):
def __getattr__(self, item: str) -> Any:
return self[item]
all_errors = set(ErrorRegistry.get_error_codes())
conventions = AttrDict(
{
'pep257': all_errors
- {
'D203',
'D212',
'D213',
'D214',
'D215',
'D404',
'D405',
'D406',
'D407',
'D408',
'D409',
'D410',
'D411',
'D413',
'D415',
'D416',
'D417',
'D418',
},
'numpy': all_errors
- {
'D107',
'D203',
'D212',
'D213',
'D402',
'D413',
'D415',
'D416',
'D417',
},
'google': all_errors
- {
'D203',
'D204',
'D213',
'D215',
'D400',
'D401',
'D404',
'D406',
'D407',
'D408',
'D409',
'D413',
},
}
)
| """Yield all registered codes."""
for group in cls.groups:
for error in group.errors:
yield error.code | identifier_body |
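# --- Editor's illustrative sketch (assumes this module imports as
# `violations`; not part of the dataset row above) ---
import violations

codes = list(violations.ErrorRegistry.get_error_codes())
assert 'D100' in codes and 'D418' in codes
print(len(codes))                               # number of registered checks
print(violations.ErrorRegistry.to_rst()[:200])  # head of the generated table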
violations.py | """Docstring violation definition."""
from collections import namedtuple
from functools import partial
from itertools import dropwhile
from typing import Any, Callable, Iterable, List, Optional
from .parser import Definition
from .utils import is_blank
__all__ = ('Error', 'ErrorRegistry', 'conventions')
ErrorParams = namedtuple('ErrorParams', ['code', 'short_desc', 'context'])
class Error:
"""Error in docstring style."""
# Options that define how errors are printed:
explain = False
source = False
def __init__(
self,
code: str,
short_desc: str,
context: str,
*parameters: Iterable[str],
) -> None:
"""Initialize the object.
`parameters` are specific to the created error.
"""
self.code = code
self.short_desc = short_desc
self.context = context
self.parameters = parameters
self.definition = None # type: Optional[Definition]
self.explanation = None # type: Optional[str]
def set_context(self, definition: Definition, explanation: str) -> None:
"""Set the source code context for this error."""
self.definition = definition
self.explanation = explanation
filename = property(lambda self: self.definition.module.name)
line = property(lambda self: self.definition.error_lineno)
@property
def message(self) -> str:
"""Return the message to print to the user."""
ret = f'{self.code}: {self.short_desc}'
if self.context is not None:
specific_error_msg = self.context.format(*self.parameters)
ret += f' ({specific_error_msg})'
return ret
@property
def lines(self) -> str:
"""Return the source code lines for this error."""
if self.definition is None:
return ''
source = ''
lines = self.definition.source.splitlines(keepends=True)
offset = self.definition.start # type: ignore
lines_stripped = list(
reversed(list(dropwhile(is_blank, reversed(lines))))
)
numbers_width = len(str(offset + len(lines_stripped)))
line_format = f'{{:{numbers_width}}}:{{}}'
for n, line in enumerate(lines_stripped):
if line:
line = ' ' + line
source += line_format.format(n + offset, line)
if n > 5:
source += ' ...\n'
break
return source
def __str__(self) -> str:
if self.explanation:
self.explanation = '\n'.join(
l for l in self.explanation.split('\n') if not is_blank(l)
)
template = '{filename}:{line} {definition}:\n {message}'
if self.source and self.explain:
template += '\n\n{explanation}\n\n{lines}\n'
elif self.source and not self.explain:
template += '\n\n{lines}\n'
elif self.explain and not self.source:
template += '\n\n{explanation}\n\n'
return template.format(
**{
name: getattr(self, name)
for name in [
'filename',
'line',
'definition',
'message',
'explanation',
'lines',
]
}
)
def __repr__(self) -> str:
return str(self)
def __lt__(self, other: 'Error') -> bool:
return (self.filename, self.line) < (other.filename, other.line)
class ErrorRegistry:
"""A registry of all error codes, divided to groups."""
groups = [] # type: ignore
class ErrorGroup:
"""A group of similarly themed errors."""
def __init__(self, prefix: str, name: str) -> None:
"""Initialize the object.
`Prefix` should be the common prefix for errors in this group,
e.g., "D1".
`name` is the name of the group (its subject).
"""
self.prefix = prefix
self.name = name
self.errors = [] # type: List[ErrorParams]
def create_error(
self,
error_code: str,
error_desc: str,
error_context: Optional[str] = None,
) -> Callable[[Iterable[str]], Error]:
"""Create an error, register it to this group and return it."""
# TODO: check prefix
error_params = ErrorParams(error_code, error_desc, error_context)
factory = partial(Error, *error_params)
self.errors.append(error_params)
return factory
@classmethod
def create_group(cls, prefix: str, name: str) -> ErrorGroup:
"""Create a new error group and return it."""
group = cls.ErrorGroup(prefix, name)
cls.groups.append(group)
return group
@classmethod
def get_error_codes(cls) -> Iterable[str]:
"""Yield all registered codes."""
for group in cls.groups:
for error in group.errors:
yield error.code
@classmethod
def to_rst(cls) -> str:
"""Output the registry as reStructuredText, for documentation."""
max_len = max(
len(error.short_desc)
for group in cls.groups
for error in group.errors
)
sep_line = '+' + 6 * '-' + '+' + '-' * (max_len + 2) + '+\n'
blank_line = '|' + (max_len + 9) * ' ' + '|\n'
table = ''
for group in cls.groups:
table += sep_line
table += blank_line
table += '|' + f'**{group.name}**'.center(max_len + 9) + '|\n'
table += blank_line
for error in group.errors:
table += sep_line
table += (
'|'
+ error.code.center(6)
+ '| '
+ error.short_desc.ljust(max_len + 1)
+ '|\n'
)
table += sep_line
return table
D1xx = ErrorRegistry.create_group('D1', 'Missing Docstrings')
D100 = D1xx.create_error(
'D100',
'Missing docstring in public module',
)
D101 = D1xx.create_error(
'D101',
'Missing docstring in public class',
)
D102 = D1xx.create_error(
'D102',
'Missing docstring in public method',
)
D103 = D1xx.create_error(
'D103',
'Missing docstring in public function',
)
D104 = D1xx.create_error(
'D104',
'Missing docstring in public package',
)
D105 = D1xx.create_error(
'D105',
'Missing docstring in magic method',
)
D106 = D1xx.create_error(
'D106',
'Missing docstring in public nested class',
)
D107 = D1xx.create_error(
'D107',
'Missing docstring in __init__',
)
D2xx = ErrorRegistry.create_group('D2', 'Whitespace Issues')
D200 = D2xx.create_error(
'D200',
'One-line docstring should fit on one line ' 'with quotes',
'found {0}',
)
D201 = D2xx.create_error(
'D201',
'No blank lines allowed before function docstring',
'found {0}',
)
D202 = D2xx.create_error(
'D202',
'No blank lines allowed after function docstring',
'found {0}',
)
D203 = D2xx.create_error(
'D203',
'1 blank line required before class docstring',
'found {0}',
)
D204 = D2xx.create_error(
'D204',
'1 blank line required after class docstring',
'found {0}',
)
D205 = D2xx.create_error(
'D205',
'1 blank line required between summary line and description',
'found {0}',
)
D206 = D2xx.create_error(
'D206',
'Docstring should be indented with spaces, not tabs',
)
D207 = D2xx.create_error(
'D207',
'Docstring is under-indented',
)
D208 = D2xx.create_error(
'D208',
'Docstring is over-indented',
)
D209 = D2xx.create_error(
'D209',
'Multi-line docstring closing quotes should be on a separate line',
)
D210 = D2xx.create_error(
'D210',
'No whitespaces allowed surrounding docstring text',
)
D211 = D2xx.create_error(
'D211',
'No blank lines allowed before class docstring',
'found {0}',
)
D212 = D2xx.create_error(
'D212',
'Multi-line docstring summary should start at the first line',
)
D213 = D2xx.create_error(
'D213',
'Multi-line docstring summary should start at the second line',
)
D214 = D2xx.create_error(
'D214',
'Section is over-indented',
'{0!r}',
) | 'D215',
'Section underline is over-indented',
'in section {0!r}',
)
D3xx = ErrorRegistry.create_group('D3', 'Quotes Issues')
D300 = D3xx.create_error(
'D300',
'Use """triple double quotes"""',
'found {0}-quotes',
)
D301 = D3xx.create_error(
'D301',
'Use r""" if any backslashes in a docstring',
)
D302 = D3xx.create_error(
'D302',
'Deprecated: Use u""" for Unicode docstrings',
)
D4xx = ErrorRegistry.create_group('D4', 'Docstring Content Issues')
D400 = D4xx.create_error(
'D400',
'First line should end with a period',
'not {0!r}',
)
D401 = D4xx.create_error(
'D401',
'First line should be in imperative mood',
"perhaps '{0}', not '{1}'",
)
D401b = D4xx.create_error(
'D401',
'First line should be in imperative mood; try rephrasing',
"found '{0}'",
)
D402 = D4xx.create_error(
'D402',
'First line should not be the function\'s "signature"',
)
D403 = D4xx.create_error(
'D403',
'First word of the first line should be properly capitalized',
'{0!r}, not {1!r}',
)
D404 = D4xx.create_error(
'D404',
'First word of the docstring should not be `This`',
)
D405 = D4xx.create_error(
'D405',
'Section name should be properly capitalized',
'{0!r}, not {1!r}',
)
D406 = D4xx.create_error(
'D406',
'Section name should end with a newline',
'{0!r}, not {1!r}',
)
D407 = D4xx.create_error(
'D407',
'Missing dashed underline after section',
'{0!r}',
)
D408 = D4xx.create_error(
'D408',
'Section underline should be in the line following the section\'s name',
'{0!r}',
)
D409 = D4xx.create_error(
'D409',
'Section underline should match the length of its name',
'Expected {0!r} dashes in section {1!r}, got {2!r}',
)
D410 = D4xx.create_error(
'D410',
'Missing blank line after section',
'{0!r}',
)
D411 = D4xx.create_error(
'D411',
'Missing blank line before section',
'{0!r}',
)
D412 = D4xx.create_error(
'D412',
'No blank lines allowed between a section header and its content',
'{0!r}',
)
D413 = D4xx.create_error(
'D413',
'Missing blank line after last section',
'{0!r}',
)
D414 = D4xx.create_error(
'D414',
'Section has no content',
'{0!r}',
)
D415 = D4xx.create_error(
'D415',
(
'First line should end with a period, question '
'mark, or exclamation point'
),
'not {0!r}',
)
D416 = D4xx.create_error(
'D416',
'Section name should end with a colon',
'{0!r}, not {1!r}',
)
D417 = D4xx.create_error(
'D417',
'Missing argument descriptions in the docstring',
'argument(s) {0} are missing descriptions in {1!r} docstring',
)
D418 = D4xx.create_error(
'D418',
'Function/ Method decorated with @overload shouldn\'t contain a docstring',
)
class AttrDict(dict):
def __getattr__(self, item: str) -> Any:
return self[item]
all_errors = set(ErrorRegistry.get_error_codes())
conventions = AttrDict(
{
'pep257': all_errors
- {
'D203',
'D212',
'D213',
'D214',
'D215',
'D404',
'D405',
'D406',
'D407',
'D408',
'D409',
'D410',
'D411',
'D413',
'D415',
'D416',
'D417',
'D418',
},
'numpy': all_errors
- {
'D107',
'D203',
'D212',
'D213',
'D402',
'D413',
'D415',
'D416',
'D417',
},
'google': all_errors
- {
'D203',
'D204',
'D213',
'D215',
'D400',
'D401',
'D404',
'D406',
'D407',
'D408',
'D409',
'D413',
},
}
) | D215 = D2xx.create_error( | random_line_split |
violations.py | """Docstring violation definition."""
from collections import namedtuple
from functools import partial
from itertools import dropwhile
from typing import Any, Callable, Iterable, List, Optional
from .parser import Definition
from .utils import is_blank
__all__ = ('Error', 'ErrorRegistry', 'conventions')
ErrorParams = namedtuple('ErrorParams', ['code', 'short_desc', 'context'])
class Error:
"""Error in docstring style."""
# Options that define how errors are printed:
explain = False
source = False
def __init__(
self,
code: str,
short_desc: str,
context: str,
*parameters: Iterable[str],
) -> None:
"""Initialize the object.
`parameters` are specific to the created error.
"""
self.code = code
self.short_desc = short_desc
self.context = context
self.parameters = parameters
self.definition = None # type: Optional[Definition]
self.explanation = None # type: Optional[str]
def set_context(self, definition: Definition, explanation: str) -> None:
"""Set the source code context for this error."""
self.definition = definition
self.explanation = explanation
filename = property(lambda self: self.definition.module.name)
line = property(lambda self: self.definition.error_lineno)
@property
def message(self) -> str:
"""Return the message to print to the user."""
ret = f'{self.code}: {self.short_desc}'
if self.context is not None:
specific_error_msg = self.context.format(*self.parameters)
ret += f' ({specific_error_msg})'
return ret
@property
def lines(self) -> str:
"""Return the source code lines for this error."""
if self.definition is None:
return ''
source = ''
lines = self.definition.source.splitlines(keepends=True)
offset = self.definition.start # type: ignore
lines_stripped = list(
reversed(list(dropwhile(is_blank, reversed(lines))))
)
numbers_width = len(str(offset + len(lines_stripped)))
line_format = f'{{:{numbers_width}}}:{{}}'
for n, line in enumerate(lines_stripped):
if line:
line = ' ' + line
source += line_format.format(n + offset, line)
if n > 5:
source += ' ...\n'
break
return source
def __str__(self) -> str:
if self.explanation:
self.explanation = '\n'.join(
l for l in self.explanation.split('\n') if not is_blank(l)
)
template = '{filename}:{line} {definition}:\n {message}'
if self.source and self.explain:
template += '\n\n{explanation}\n\n{lines}\n'
elif self.source and not self.explain:
template += '\n\n{lines}\n'
elif self.explain and not self.source:
template += '\n\n{explanation}\n\n'
return template.format(
**{
name: getattr(self, name)
for name in [
'filename',
'line',
'definition',
'message',
'explanation',
'lines',
]
}
)
def __repr__(self) -> str:
return str(self)
def __lt__(self, other: 'Error') -> bool:
return (self.filename, self.line) < (other.filename, other.line)
class ErrorRegistry:
"""A registry of all error codes, divided to groups."""
groups = [] # type: ignore
class ErrorGroup:
"""A group of similarly themed errors."""
def __init__(self, prefix: str, name: str) -> None:
"""Initialize the object.
`Prefix` should be the common prefix for errors in this group,
e.g., "D1".
`name` is the name of the group (its subject).
"""
self.prefix = prefix
self.name = name
self.errors = [] # type: List[ErrorParams]
def create_error(
self,
error_code: str,
error_desc: str,
error_context: Optional[str] = None,
) -> Callable[[Iterable[str]], Error]:
"""Create an error, register it to this group and return it."""
# TODO: check prefix
error_params = ErrorParams(error_code, error_desc, error_context)
factory = partial(Error, *error_params)
self.errors.append(error_params)
return factory
@classmethod
def create_group(cls, prefix: str, name: str) -> ErrorGroup:
"""Create a new error group and return it."""
group = cls.ErrorGroup(prefix, name)
cls.groups.append(group)
return group
@classmethod
def get_error_codes(cls) -> Iterable[str]:
"""Yield all registered codes."""
for group in cls.groups:
|
@classmethod
def to_rst(cls) -> str:
"""Output the registry as reStructuredText, for documentation."""
max_len = max(
len(error.short_desc)
for group in cls.groups
for error in group.errors
)
sep_line = '+' + 6 * '-' + '+' + '-' * (max_len + 2) + '+\n'
blank_line = '|' + (max_len + 9) * ' ' + '|\n'
table = ''
for group in cls.groups:
table += sep_line
table += blank_line
table += '|' + f'**{group.name}**'.center(max_len + 9) + '|\n'
table += blank_line
for error in group.errors:
table += sep_line
table += (
'|'
+ error.code.center(6)
+ '| '
+ error.short_desc.ljust(max_len + 1)
+ '|\n'
)
table += sep_line
return table
D1xx = ErrorRegistry.create_group('D1', 'Missing Docstrings')
D100 = D1xx.create_error(
'D100',
'Missing docstring in public module',
)
D101 = D1xx.create_error(
'D101',
'Missing docstring in public class',
)
D102 = D1xx.create_error(
'D102',
'Missing docstring in public method',
)
D103 = D1xx.create_error(
'D103',
'Missing docstring in public function',
)
D104 = D1xx.create_error(
'D104',
'Missing docstring in public package',
)
D105 = D1xx.create_error(
'D105',
'Missing docstring in magic method',
)
D106 = D1xx.create_error(
'D106',
'Missing docstring in public nested class',
)
D107 = D1xx.create_error(
'D107',
'Missing docstring in __init__',
)
D2xx = ErrorRegistry.create_group('D2', 'Whitespace Issues')
D200 = D2xx.create_error(
'D200',
'One-line docstring should fit on one line ' 'with quotes',
'found {0}',
)
D201 = D2xx.create_error(
'D201',
'No blank lines allowed before function docstring',
'found {0}',
)
D202 = D2xx.create_error(
'D202',
'No blank lines allowed after function docstring',
'found {0}',
)
D203 = D2xx.create_error(
'D203',
'1 blank line required before class docstring',
'found {0}',
)
D204 = D2xx.create_error(
'D204',
'1 blank line required after class docstring',
'found {0}',
)
D205 = D2xx.create_error(
'D205',
'1 blank line required between summary line and description',
'found {0}',
)
D206 = D2xx.create_error(
'D206',
'Docstring should be indented with spaces, not tabs',
)
D207 = D2xx.create_error(
'D207',
'Docstring is under-indented',
)
D208 = D2xx.create_error(
'D208',
'Docstring is over-indented',
)
D209 = D2xx.create_error(
'D209',
'Multi-line docstring closing quotes should be on a separate line',
)
D210 = D2xx.create_error(
'D210',
'No whitespaces allowed surrounding docstring text',
)
D211 = D2xx.create_error(
'D211',
'No blank lines allowed before class docstring',
'found {0}',
)
D212 = D2xx.create_error(
'D212',
'Multi-line docstring summary should start at the first line',
)
D213 = D2xx.create_error(
'D213',
'Multi-line docstring summary should start at the second line',
)
D214 = D2xx.create_error(
'D214',
'Section is over-indented',
'{0!r}',
)
D215 = D2xx.create_error(
'D215',
'Section underline is over-indented',
'in section {0!r}',
)
D3xx = ErrorRegistry.create_group('D3', 'Quotes Issues')
D300 = D3xx.create_error(
'D300',
'Use """triple double quotes"""',
'found {0}-quotes',
)
D301 = D3xx.create_error(
'D301',
'Use r""" if any backslashes in a docstring',
)
D302 = D3xx.create_error(
'D302',
'Deprecated: Use u""" for Unicode docstrings',
)
D4xx = ErrorRegistry.create_group('D4', 'Docstring Content Issues')
D400 = D4xx.create_error(
'D400',
'First line should end with a period',
'not {0!r}',
)
D401 = D4xx.create_error(
'D401',
'First line should be in imperative mood',
"perhaps '{0}', not '{1}'",
)
D401b = D4xx.create_error(
'D401',
'First line should be in imperative mood; try rephrasing',
"found '{0}'",
)
D402 = D4xx.create_error(
'D402',
'First line should not be the function\'s "signature"',
)
D403 = D4xx.create_error(
'D403',
'First word of the first line should be properly capitalized',
'{0!r}, not {1!r}',
)
D404 = D4xx.create_error(
'D404',
'First word of the docstring should not be `This`',
)
D405 = D4xx.create_error(
'D405',
'Section name should be properly capitalized',
'{0!r}, not {1!r}',
)
D406 = D4xx.create_error(
'D406',
'Section name should end with a newline',
'{0!r}, not {1!r}',
)
D407 = D4xx.create_error(
'D407',
'Missing dashed underline after section',
'{0!r}',
)
D408 = D4xx.create_error(
'D408',
'Section underline should be in the line following the section\'s name',
'{0!r}',
)
D409 = D4xx.create_error(
'D409',
'Section underline should match the length of its name',
'Expected {0!r} dashes in section {1!r}, got {2!r}',
)
D410 = D4xx.create_error(
'D410',
'Missing blank line after section',
'{0!r}',
)
D411 = D4xx.create_error(
'D411',
'Missing blank line before section',
'{0!r}',
)
D412 = D4xx.create_error(
'D412',
'No blank lines allowed between a section header and its content',
'{0!r}',
)
D413 = D4xx.create_error(
'D413',
'Missing blank line after last section',
'{0!r}',
)
D414 = D4xx.create_error(
'D414',
'Section has no content',
'{0!r}',
)
D415 = D4xx.create_error(
'D415',
(
'First line should end with a period, question '
'mark, or exclamation point'
),
'not {0!r}',
)
D416 = D4xx.create_error(
'D416',
'Section name should end with a colon',
'{0!r}, not {1!r}',
)
D417 = D4xx.create_error(
'D417',
'Missing argument descriptions in the docstring',
'argument(s) {0} are missing descriptions in {1!r} docstring',
)
D418 = D4xx.create_error(
'D418',
'Function/ Method decorated with @overload shouldn\'t contain a docstring',
)
class AttrDict(dict):
def __getattr__(self, item: str) -> Any:
return self[item]
all_errors = set(ErrorRegistry.get_error_codes())
conventions = AttrDict(
{
'pep257': all_errors
- {
'D203',
'D212',
'D213',
'D214',
'D215',
'D404',
'D405',
'D406',
'D407',
'D408',
'D409',
'D410',
'D411',
'D413',
'D415',
'D416',
'D417',
'D418',
},
'numpy': all_errors
- {
'D107',
'D203',
'D212',
'D213',
'D402',
'D413',
'D415',
'D416',
'D417',
},
'google': all_errors
- {
'D203',
'D204',
'D213',
'D215',
'D400',
'D401',
'D404',
'D406',
'D407',
'D408',
'D409',
'D413',
},
}
)
| for error in group.errors:
yield error.code | conditional_block |
violations.py | """Docstring violation definition."""
from collections import namedtuple
from functools import partial
from itertools import dropwhile
from typing import Any, Callable, Iterable, List, Optional
from .parser import Definition
from .utils import is_blank
__all__ = ('Error', 'ErrorRegistry', 'conventions')
ErrorParams = namedtuple('ErrorParams', ['code', 'short_desc', 'context'])
class Error:
"""Error in docstring style."""
# Options that define how errors are printed:
explain = False
source = False
def __init__(
self,
code: str,
short_desc: str,
context: str,
*parameters: Iterable[str],
) -> None:
"""Initialize the object.
`parameters` are specific to the created error.
"""
self.code = code
self.short_desc = short_desc
self.context = context
self.parameters = parameters
self.definition = None # type: Optional[Definition]
self.explanation = None # type: Optional[str]
def set_context(self, definition: Definition, explanation: str) -> None:
"""Set the source code context for this error."""
self.definition = definition
self.explanation = explanation
filename = property(lambda self: self.definition.module.name)
line = property(lambda self: self.definition.error_lineno)
@property
def message(self) -> str:
"""Return the message to print to the user."""
ret = f'{self.code}: {self.short_desc}'
if self.context is not None:
specific_error_msg = self.context.format(*self.parameters)
ret += f' ({specific_error_msg})'
return ret
@property
def lines(self) -> str:
"""Return the source code lines for this error."""
if self.definition is None:
return ''
source = ''
lines = self.definition.source.splitlines(keepends=True)
offset = self.definition.start # type: ignore
lines_stripped = list(
reversed(list(dropwhile(is_blank, reversed(lines))))
)
numbers_width = len(str(offset + len(lines_stripped)))
line_format = f'{{:{numbers_width}}}:{{}}'
for n, line in enumerate(lines_stripped):
if line:
line = ' ' + line
source += line_format.format(n + offset, line)
if n > 5:
source += ' ...\n'
break
return source
def __str__(self) -> str:
if self.explanation:
self.explanation = '\n'.join(
l for l in self.explanation.split('\n') if not is_blank(l)
)
template = '{filename}:{line} {definition}:\n {message}'
if self.source and self.explain:
template += '\n\n{explanation}\n\n{lines}\n'
elif self.source and not self.explain:
template += '\n\n{lines}\n'
elif self.explain and not self.source:
template += '\n\n{explanation}\n\n'
return template.format(
**{
name: getattr(self, name)
for name in [
'filename',
'line',
'definition',
'message',
'explanation',
'lines',
]
}
)
def __repr__(self) -> str:
return str(self)
def __lt__(self, other: 'Error') -> bool:
return (self.filename, self.line) < (other.filename, other.line)
class ErrorRegistry:
"""A registry of all error codes, divided to groups."""
groups = [] # type: ignore
class ErrorGroup:
"""A group of similarly themed errors."""
def __init__(self, prefix: str, name: str) -> None:
"""Initialize the object.
`Prefix` should be the common prefix for errors in this group,
e.g., "D1".
`name` is the name of the group (its subject).
"""
self.prefix = prefix
self.name = name
self.errors = [] # type: List[ErrorParams]
def create_error(
self,
error_code: str,
error_desc: str,
error_context: Optional[str] = None,
) -> Callable[[Iterable[str]], Error]:
"""Create an error, register it to this group and return it."""
# TODO: check prefix
error_params = ErrorParams(error_code, error_desc, error_context)
factory = partial(Error, *error_params)
self.errors.append(error_params)
return factory
@classmethod
def create_group(cls, prefix: str, name: str) -> ErrorGroup:
"""Create a new error group and return it."""
group = cls.ErrorGroup(prefix, name)
cls.groups.append(group)
return group
@classmethod
def get_error_codes(cls) -> Iterable[str]:
"""Yield all registered codes."""
for group in cls.groups:
for error in group.errors:
yield error.code
@classmethod
def to_rst(cls) -> str:
"""Output the registry as reStructuredText, for documentation."""
max_len = max(
len(error.short_desc)
for group in cls.groups
for error in group.errors
)
sep_line = '+' + 6 * '-' + '+' + '-' * (max_len + 2) + '+\n'
blank_line = '|' + (max_len + 9) * ' ' + '|\n'
table = ''
for group in cls.groups:
table += sep_line
table += blank_line
table += '|' + f'**{group.name}**'.center(max_len + 9) + '|\n'
table += blank_line
for error in group.errors:
table += sep_line
table += (
'|'
+ error.code.center(6)
+ '| '
+ error.short_desc.ljust(max_len + 1)
+ '|\n'
)
table += sep_line
return table
D1xx = ErrorRegistry.create_group('D1', 'Missing Docstrings')
D100 = D1xx.create_error(
'D100',
'Missing docstring in public module',
)
D101 = D1xx.create_error(
'D101',
'Missing docstring in public class',
)
D102 = D1xx.create_error(
'D102',
'Missing docstring in public method',
)
D103 = D1xx.create_error(
'D103',
'Missing docstring in public function',
)
D104 = D1xx.create_error(
'D104',
'Missing docstring in public package',
)
D105 = D1xx.create_error(
'D105',
'Missing docstring in magic method',
)
D106 = D1xx.create_error(
'D106',
'Missing docstring in public nested class',
)
D107 = D1xx.create_error(
'D107',
'Missing docstring in __init__',
)
D2xx = ErrorRegistry.create_group('D2', 'Whitespace Issues')
D200 = D2xx.create_error(
'D200',
'One-line docstring should fit on one line ' 'with quotes',
'found {0}',
)
D201 = D2xx.create_error(
'D201',
'No blank lines allowed before function docstring',
'found {0}',
)
D202 = D2xx.create_error(
'D202',
'No blank lines allowed after function docstring',
'found {0}',
)
D203 = D2xx.create_error(
'D203',
'1 blank line required before class docstring',
'found {0}',
)
D204 = D2xx.create_error(
'D204',
'1 blank line required after class docstring',
'found {0}',
)
D205 = D2xx.create_error(
'D205',
'1 blank line required between summary line and description',
'found {0}',
)
D206 = D2xx.create_error(
'D206',
'Docstring should be indented with spaces, not tabs',
)
D207 = D2xx.create_error(
'D207',
'Docstring is under-indented',
)
D208 = D2xx.create_error(
'D208',
'Docstring is over-indented',
)
D209 = D2xx.create_error(
'D209',
'Multi-line docstring closing quotes should be on a separate line',
)
D210 = D2xx.create_error(
'D210',
'No whitespaces allowed surrounding docstring text',
)
D211 = D2xx.create_error(
'D211',
'No blank lines allowed before class docstring',
'found {0}',
)
D212 = D2xx.create_error(
'D212',
'Multi-line docstring summary should start at the first line',
)
D213 = D2xx.create_error(
'D213',
'Multi-line docstring summary should start at the second line',
)
D214 = D2xx.create_error(
'D214',
'Section is over-indented',
'{0!r}',
)
D215 = D2xx.create_error(
'D215',
'Section underline is over-indented',
'in section {0!r}',
)
D3xx = ErrorRegistry.create_group('D3', 'Quotes Issues')
D300 = D3xx.create_error(
'D300',
'Use """triple double quotes"""',
'found {0}-quotes',
)
D301 = D3xx.create_error(
'D301',
'Use r""" if any backslashes in a docstring',
)
D302 = D3xx.create_error(
'D302',
'Deprecated: Use u""" for Unicode docstrings',
)
D4xx = ErrorRegistry.create_group('D4', 'Docstring Content Issues')
D400 = D4xx.create_error(
'D400',
'First line should end with a period',
'not {0!r}',
)
D401 = D4xx.create_error(
'D401',
'First line should be in imperative mood',
"perhaps '{0}', not '{1}'",
)
D401b = D4xx.create_error(
'D401',
'First line should be in imperative mood; try rephrasing',
"found '{0}'",
)
D402 = D4xx.create_error(
'D402',
'First line should not be the function\'s "signature"',
)
D403 = D4xx.create_error(
'D403',
'First word of the first line should be properly capitalized',
'{0!r}, not {1!r}',
)
D404 = D4xx.create_error(
'D404',
'First word of the docstring should not be `This`',
)
D405 = D4xx.create_error(
'D405',
'Section name should be properly capitalized',
'{0!r}, not {1!r}',
)
D406 = D4xx.create_error(
'D406',
'Section name should end with a newline',
'{0!r}, not {1!r}',
)
D407 = D4xx.create_error(
'D407',
'Missing dashed underline after section',
'{0!r}',
)
D408 = D4xx.create_error(
'D408',
'Section underline should be in the line following the section\'s name',
'{0!r}',
)
D409 = D4xx.create_error(
'D409',
'Section underline should match the length of its name',
'Expected {0!r} dashes in section {1!r}, got {2!r}',
)
D410 = D4xx.create_error(
'D410',
'Missing blank line after section',
'{0!r}',
)
D411 = D4xx.create_error(
'D411',
'Missing blank line before section',
'{0!r}',
)
D412 = D4xx.create_error(
'D412',
'No blank lines allowed between a section header and its content',
'{0!r}',
)
D413 = D4xx.create_error(
'D413',
'Missing blank line after last section',
'{0!r}',
)
D414 = D4xx.create_error(
'D414',
'Section has no content',
'{0!r}',
)
D415 = D4xx.create_error(
'D415',
(
'First line should end with a period, question '
'mark, or exclamation point'
),
'not {0!r}',
)
D416 = D4xx.create_error(
'D416',
'Section name should end with a colon',
'{0!r}, not {1!r}',
)
D417 = D4xx.create_error(
'D417',
'Missing argument descriptions in the docstring',
'argument(s) {0} are missing descriptions in {1!r} docstring',
)
D418 = D4xx.create_error(
'D418',
'Function/ Method decorated with @overload shouldn\'t contain a docstring',
)
class AttrDict(dict):
def | (self, item: str) -> Any:
return self[item]
all_errors = set(ErrorRegistry.get_error_codes())
conventions = AttrDict(
{
'pep257': all_errors
- {
'D203',
'D212',
'D213',
'D214',
'D215',
'D404',
'D405',
'D406',
'D407',
'D408',
'D409',
'D410',
'D411',
'D413',
'D415',
'D416',
'D417',
'D418',
},
'numpy': all_errors
- {
'D107',
'D203',
'D212',
'D213',
'D402',
'D413',
'D415',
'D416',
'D417',
},
'google': all_errors
- {
'D203',
'D204',
'D213',
'D215',
'D400',
'D401',
'D404',
'D406',
'D407',
'D408',
'D409',
'D413',
},
}
)
| __getattr__ | identifier_name |
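# --- Editor's illustrative sketch (assumes import name `violations`; not
# part of the dataset rows above): the D-code factories and convention
# sets defined above in action. ---
import violations

err = violations.D203('2')  # 'found {0}' context formatted with '2'
print(err.message)          # D203: 1 blank line required before class docstring (found 2)

# Convention sets are plain sets of code strings, convenient for filtering.
print('D203' in violations.conventions.pep257)  # False: pep257 excludes D203
print('D100' in violations.conventions.google)  # True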
VoiceNetwork.tsx | import React, { useMemo, useState } from 'react';
import { TMemo } from '@shared/components/TMemo';
import { useRTCRoomClientContext } from '@rtc/RoomContext';
import { useAsyncTimeout } from '@shared/hooks/useAsyncTimeout';
import _get from 'lodash/get';
import filesize from 'filesize';
import { useTranslation } from '@shared/i18n';
/**
* Get a human-readable transfer-speed string
* @param bitrate bit rate (bits per second)
*/
function getStreamRate(bitrate: number): string {
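// filesize() treats its input as bytes; dividing by 8 and passing
// { bits: true } round-trips back to bit units, so with the trailing '/s'
// the result reads as a bit rate (exact unit text depends on the filesize
// package version).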
return filesize(Number(bitrate) / 8, { bits: true }) + '/s';
}
/**
* Voice network status display
*/
export const VoiceNetwork: React.FC = TMemo(() => {
const { client } = useRTCRoomClientContext();
const [remoteStats, setRemoteStats] = useState<any>({});
const { t } = useTranslation();
useAsyncTimeout(async () => {
if (client) {
const sendRemoteStats = await cl | () => {
return {
// i.e. what the remote side received over the producer's (send) transport
upstream: getStreamRate(
_get(remoteStats, ['sendRemoteStats', 0, 'recvBitrate'], 0)
),
// i.e. what the remote side sent over the consumer's (recv) transport
downstream: getStreamRate(
_get(remoteStats, ['recvRemoteStats', 0, 'sendBitrate'], 0)
),
};
}, [remoteStats]);
return (
<pre>
{t('上传')}: {bitrate.upstream} {t('下载')}: {bitrate.downstream}
</pre>
);
});
VoiceNetwork.displayName = 'VoiceNetwork';
| ient
.getSendTransportRemoteStats()
.catch(() => {});
const recvRemoteStats = await client
.getRecvTransportRemoteStats()
.catch(() => {});
setRemoteStats({
sendRemoteStats,
recvRemoteStats,
});
}
}, 2000);
const bitrate = useMemo( | conditional_block |
VoiceNetwork.tsx | import React, { useMemo, useState } from 'react';
import { TMemo } from '@shared/components/TMemo';
import { useRTCRoomClientContext } from '@rtc/RoomContext';
import { useAsyncTimeout } from '@shared/hooks/useAsyncTimeout';
import _get from 'lodash/get';
import filesize from 'filesize';
import { useTranslation } from '@shared/i18n';
/**
* Get a human-readable transfer-speed string
* @param bitrate bit rate (bits per second)
*/
function getStreamRate(bitrate: number): string {
return filesize(Numb | port const VoiceNetwork: React.FC = TMemo(() => {
const { client } = useRTCRoomClientContext();
const [remoteStats, setRemoteStats] = useState<any>({});
const { t } = useTranslation();
useAsyncTimeout(async () => {
if (client) {
const sendRemoteStats = await client
.getSendTransportRemoteStats()
.catch(() => {});
const recvRemoteStats = await client
.getRecvTransportRemoteStats()
.catch(() => {});
setRemoteStats({
sendRemoteStats,
recvRemoteStats,
});
}
}, 2000);
const bitrate = useMemo(() => {
return {
// i.e. what the remote side received over the producer's (send) transport
upstream: getStreamRate(
_get(remoteStats, ['sendRemoteStats', 0, 'recvBitrate'], 0)
),
// i.e. what the remote side sent over the consumer's (recv) transport
downstream: getStreamRate(
_get(remoteStats, ['recvRemoteStats', 0, 'sendBitrate'], 0)
),
};
}, [remoteStats]);
return (
<pre>
{t('上传')}: {bitrate.upstream} {t('下载')}: {bitrate.downstream}
</pre>
);
});
VoiceNetwork.displayName = 'VoiceNetwork';
| er(bitrate) / 8, { bits: true }) + '/s';
}
/**
* Voice network status display
*/
ex | identifier_body |
VoiceNetwork.tsx | import React, { useMemo, useState } from 'react';
import { TMemo } from '@shared/components/TMemo';
import { useRTCRoomClientContext } from '@rtc/RoomContext';
import { useAsyncTimeout } from '@shared/hooks/useAsyncTimeout';
import _get from 'lodash/get';
import filesize from 'filesize';
import { useTranslation } from '@shared/i18n';
/**
* Get a human-readable transfer-speed string
* @param bitrate bit rate (bits per second)
*/
function getStreamRate(bitrate: number): string {
return filesize(Number(bitrate) / 8, { bits: true }) + '/s';
}
/**
* Voice network status display
*/
export const VoiceNetwork: React.FC = TMemo(() => {
const { client } = useRTCRoomClientContext();
const [remoteStats, setRemoteStats] = useState<any>({});
const { t } = useTranslation();
useAsyncTimeout(async () => {
if (client) {
const sendRemoteStats = await client
.getSendTransportRemoteStats()
.catch(() => {});
const recvRemoteStats = await client
.getRecvTransportRemoteStats()
.catch(() => {});
setRemoteStats({
sendRemoteStats,
recvRemoteStats,
});
}
}, 2000);
const bitrate = useMemo(() => {
return {
// i.e. what the remote side received over the producer's (send) transport
upstream: getStreamRate(
_get(remoteStats, ['sendRemoteStats', 0, 'recvBitrate'], 0)
),
// i.e. what the remote side sent over the consumer's (recv) transport
downstream: getStreamRate( |
return (
<pre>
{t('上传')}: {bitrate.upstream} {t('下载')}: {bitrate.downstream}
</pre>
);
});
VoiceNetwork.displayName = 'VoiceNetwork'; | _get(remoteStats, ['recvRemoteStats', 0, 'sendBitrate'], 0)
),
};
}, [remoteStats]); | random_line_split |
VoiceNetwork.tsx | import React, { useMemo, useState } from 'react';
import { TMemo } from '@shared/components/TMemo';
import { useRTCRoomClientContext } from '@rtc/RoomContext';
import { useAsyncTimeout } from '@shared/hooks/useAsyncTimeout';
import _get from 'lodash/get';
import filesize from 'filesize';
import { useTranslation } from '@shared/i18n';
/**
* Get a human-readable transfer-speed string
* @param bitrate bit rate (bits per second)
*/
function getStreamRate(bitrate: n | g {
return filesize(Number(bitrate) / 8, { bits: true }) + '/s';
}
/**
* Voice network status display
*/
export const VoiceNetwork: React.FC = TMemo(() => {
const { client } = useRTCRoomClientContext();
const [remoteStats, setRemoteStats] = useState<any>({});
const { t } = useTranslation();
useAsyncTimeout(async () => {
if (client) {
const sendRemoteStats = await client
.getSendTransportRemoteStats()
.catch(() => {});
const recvRemoteStats = await client
.getRecvTransportRemoteStats()
.catch(() => {});
setRemoteStats({
sendRemoteStats,
recvRemoteStats,
});
}
}, 2000);
const bitrate = useMemo(() => {
return {
// i.e. what the remote side received over the producer's (send) transport
upstream: getStreamRate(
_get(remoteStats, ['sendRemoteStats', 0, 'recvBitrate'], 0)
),
// i.e. what the remote side sent over the consumer's (recv) transport
downstream: getStreamRate(
_get(remoteStats, ['recvRemoteStats', 0, 'sendBitrate'], 0)
),
};
}, [remoteStats]);
return (
<pre>
{t('上传')}: {bitrate.upstream} {t('下载')}: {bitrate.downstream}
</pre>
);
});
VoiceNetwork.displayName = 'VoiceNetwork';
| umber): strin | identifier_name |
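// --- Editor's illustrative sketch (not part of the dataset rows above) ---
// What getStreamRate() is expected to produce; exact unit text depends on
// the filesize package version, so treat the outputs as approximate.
import filesize from 'filesize';

const rate = (bitrate: number) => filesize(bitrate / 8, { bits: true }) + '/s';
console.log(rate(8_000));      // roughly "8 Kb/s"
console.log(rate(1_048_576));  // roughly "1.05 Mb/s"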
rayon.rs | use crate::{ProgressBar, ProgressBarIter};
use rayon::iter::{
plumbing::{Consumer, Folder, Producer, ProducerCallback, UnindexedConsumer},
IndexedParallelIterator, ParallelIterator,
};
use std::convert::TryFrom;
/// Wraps a Rayon parallel iterator.
///
/// See [`ProgressIterator`](trait.ProgressIterator.html) for method
/// documentation.
pub trait ParallelProgressIterator
where
Self: Sized + ParallelIterator,
{
/// Wrap an iterator with a custom progress bar.
fn progress_with(self, progress: ProgressBar) -> ProgressBarIter<Self>;
/// Wrap an iterator with an explicit element count.
fn progress_count(self, len: u64) -> ProgressBarIter<Self> {
self.progress_with(ProgressBar::new(len))
}
fn progress(self) -> ProgressBarIter<Self>
where
Self: IndexedParallelIterator,
{
let len = u64::try_from(self.len()).unwrap();
self.progress_count(len)
}
/// Wrap an iterator with a progress bar and style it.
fn progress_with_style(self, style: crate::ProgressStyle) -> ProgressBarIter<Self>
where
Self: IndexedParallelIterator,
{
let len = u64::try_from(self.len()).unwrap();
let bar = ProgressBar::new(len).with_style(style);
self.progress_with(bar)
}
}
impl<S: Send, T: ParallelIterator<Item = S>> ParallelProgressIterator for T {
fn progress_with(self, progress: ProgressBar) -> ProgressBarIter<Self> {
ProgressBarIter { it: self, progress }
}
}
impl<S: Send, T: IndexedParallelIterator<Item = S>> IndexedParallelIterator for ProgressBarIter<T> {
fn len(&self) -> usize |
fn drive<C: Consumer<Self::Item>>(self, consumer: C) -> <C as Consumer<Self::Item>>::Result {
let consumer = ProgressConsumer::new(consumer, self.progress);
self.it.drive(consumer)
}
fn with_producer<CB: ProducerCallback<Self::Item>>(
self,
callback: CB,
) -> <CB as ProducerCallback<Self::Item>>::Output {
return self.it.with_producer(Callback {
callback,
progress: self.progress,
});
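// A named struct is used instead of a closure because
// ProducerCallback::callback is generic over the producer type,
// which closure traits cannot express.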
struct Callback<CB> {
callback: CB,
progress: ProgressBar,
}
impl<T, CB: ProducerCallback<T>> ProducerCallback<T> for Callback<CB> {
type Output = CB::Output;
fn callback<P>(self, base: P) -> CB::Output
where
P: Producer<Item = T>,
{
let producer = ProgressProducer {
base,
progress: self.progress,
};
self.callback.callback(producer)
}
}
}
}
struct ProgressProducer<T> {
base: T,
progress: ProgressBar,
}
impl<T, P: Producer<Item = T>> Producer for ProgressProducer<P> {
type Item = T;
type IntoIter = ProgressBarIter<P::IntoIter>;
fn into_iter(self) -> Self::IntoIter {
ProgressBarIter {
it: self.base.into_iter(),
progress: self.progress,
}
}
fn min_len(&self) -> usize {
self.base.min_len()
}
fn max_len(&self) -> usize {
self.base.max_len()
}
fn split_at(self, index: usize) -> (Self, Self) {
let (left, right) = self.base.split_at(index);
(
ProgressProducer {
base: left,
progress: self.progress.clone(),
},
ProgressProducer {
base: right,
progress: self.progress,
},
)
}
}
struct ProgressConsumer<C> {
base: C,
progress: ProgressBar,
}
impl<C> ProgressConsumer<C> {
fn new(base: C, progress: ProgressBar) -> Self {
ProgressConsumer { base, progress }
}
}
impl<T, C: Consumer<T>> Consumer<T> for ProgressConsumer<C> {
type Folder = ProgressFolder<C::Folder>;
type Reducer = C::Reducer;
type Result = C::Result;
fn split_at(self, index: usize) -> (Self, Self, Self::Reducer) {
let (left, right, reducer) = self.base.split_at(index);
(
ProgressConsumer::new(left, self.progress.clone()),
ProgressConsumer::new(right, self.progress),
reducer,
)
}
fn into_folder(self) -> Self::Folder {
ProgressFolder {
base: self.base.into_folder(),
progress: self.progress,
}
}
fn full(&self) -> bool {
self.base.full()
}
}
impl<T, C: UnindexedConsumer<T>> UnindexedConsumer<T> for ProgressConsumer<C> {
fn split_off_left(&self) -> Self {
ProgressConsumer::new(self.base.split_off_left(), self.progress.clone())
}
fn to_reducer(&self) -> Self::Reducer {
self.base.to_reducer()
}
}
struct ProgressFolder<C> {
base: C,
progress: ProgressBar,
}
impl<T, C: Folder<T>> Folder<T> for ProgressFolder<C> {
type Result = C::Result;
fn consume(self, item: T) -> Self {
self.progress.inc(1);
ProgressFolder {
base: self.base.consume(item),
progress: self.progress,
}
}
fn complete(self) -> C::Result {
self.base.complete()
}
fn full(&self) -> bool {
self.base.full()
}
}
impl<S: Send, T: ParallelIterator<Item = S>> ParallelIterator for ProgressBarIter<T> {
type Item = S;
fn drive_unindexed<C: UnindexedConsumer<Self::Item>>(self, consumer: C) -> C::Result {
let consumer1 = ProgressConsumer::new(consumer, self.progress.clone());
self.it.drive_unindexed(consumer1)
}
}
#[cfg(test)]
mod test {
use crate::ProgressStyle;
use crate::{ParallelProgressIterator, ProgressBar, ProgressBarIter};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
#[test]
fn it_can_wrap_a_parallel_iterator() {
let v = vec![1, 2, 3];
fn wrap<'a, T: ParallelIterator<Item = &'a i32>>(it: ProgressBarIter<T>) {
assert_eq!(it.map(|x| x * 2).collect::<Vec<_>>(), vec![2, 4, 6]);
}
wrap(v.par_iter().progress_count(3));
wrap({
let pb = ProgressBar::new(v.len() as u64);
v.par_iter().progress_with(pb)
});
wrap({
let style = ProgressStyle::default_bar().template("{wide_bar:.red} {percent}/100%");
v.par_iter().progress_with_style(style)
});
}
}
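// Illustrative usage (editor's addition, not part of the original file). This
// sketch assumes the module is built into `indicatif` with its Rayon support
// enabled; the vector contents are arbitrary:
//
//     use indicatif::ParallelProgressIterator;
//     use rayon::prelude::*;
//
//     let v: Vec<u64> = (0..1_000).collect();
//     let doubled: Vec<u64> = v.par_iter().progress().map(|x| x * 2).collect();
//     assert_eq!(doubled[1], 2);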
class_physics_component.js
var class_physics_component =
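// Editor's note: this appears to be generated Doxygen navigation data; each
// entry below is [member name, documentation anchor, child entries].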
[
[ "createXmlNode", "class_physics_component.html#a5a2e3761a13d45a4dd38fe3b69253332", null ],
[ "destroyDispatcher", "class_physics_component.html#a3c17f238e0ea725fc91a151591bf9510", null ],
[ "getPosition", "class_physics_component.html#aeee07d4204bae0ff7747f5c0009907a1", null ],
[ "setPosition", "class_physics_component.html#a12d373e7cba22ea2d5925063664fc7e2", null ],
  [ "PhysicsSystem", "class_physics_component.html#a6fb7520528fab4a670001f041b872bf2", null ],
  [ "myPhysicsSystem", "class_physics_component.html#a975c62b57bcba88f3738edfe308da17b", null ]
];
view.tree.js
class TreeView {
constructor($dom, store, adapter) {
this.store = store;
this.adapter = adapter;
this.$view = $dom.find('.octotree_treeview');
this.$tree = this.$view
.find('.octotree_view_body')
.on('click.jstree', '.jstree-open>a', ({target}) => {
setTimeout(() => {
this.$jstree.close_node(target)
}, 0);
})
.on('click.jstree', '.jstree-closed>a', ({target}) => {
setTimeout(() => {
this.$jstree.open_node(target)
}, 0);
})
.on('click', this._onItemClick.bind(this))
.jstree({
core: {multiple: false, worker: false, themes: {responsive: false}},
plugins: ['wholerow']
});
}
get $jstree() {
return this.$tree.jstree(true);
}
show(repo, token) {
const $jstree = this.$jstree;
$jstree.settings.core.data = (node, cb) => {
const prMode = this.store.get(STORE.PR) && repo.pullNumber;
const loadAll = this.adapter.canLoadEntireTree(repo) && (prMode || this.store.get(STORE.LOADALL));
node = !loadAll && (node.id === '#' ? {path: ''} : node.original);
this.adapter.loadCodeTree({repo, token, node}, (err, treeData) => {
if (err) {
if (err.status === 206 && loadAll) { // The repo is too big to load all, need to retry
$jstree.refresh(true);
} else {
$(this).trigger(EVENT.FETCH_ERROR, [err]);
}
} else {
treeData = this._sort(treeData);
if (loadAll) {
treeData = this._collapse(treeData);
}
cb(treeData);
}
});
};
this.$tree.one('refresh.jstree', () => {
this.syncSelection(repo);
$(this).trigger(EVENT.VIEW_READY);
});
this._showHeader(repo);
$jstree.refresh(true);
}
_showHeader(repo) {
const adapter = this.adapter;
this.$view
.find('.octotree_view_header')
.html(
`<div class="octotree_header_repo">
<a href="/${repo.username}">${repo.username}</a>
/
<a data-pjax href="/${repo.username}/${repo.reponame}">${repo.reponame}</a>
</div>
<div class="octotree_header_branch">
${this._deXss(repo.branch.toString())}
</div>`
)
.on('click', 'a[data-pjax]', function(event) {
event.preventDefault();
        // A.href always returns an absolute URL, which we don't want here
const href = $(this).attr('href');
const newTab = event.shiftKey || event.ctrlKey || event.metaKey;
newTab ? adapter.openInNewTab(href) : adapter.selectFile(href);
});
}
_deXss(str) {
return str && str.replace(/[<>'"&]/g, '-');
}
_sort(folder) {
folder.sort((a, b) => {
if (a.type === b.type) return a.text === b.text ? 0 : a.text < b.text ? -1 : 1;
return a.type === 'blob' ? 1 : -1;
});
folder.forEach((item) => {
if (item.type === 'tree' && item.children !== true && item.children.length > 0) {
this._sort(item.children);
}
});
return folder;
}
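  // Collapses chains of single-child directories into one node
  // (e.g. a folder `a` holding only `b` becomes `a/b`), GitHub-style.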
_collapse(folder) {
return folder.map((item) => {
if (item.type === 'tree') {
item.children = this._collapse(item.children);
if (item.children.length === 1 && item.children[0].type === 'tree') {
const onlyChild = item.children[0];
onlyChild.text = item.text + '/' + onlyChild.text;
return onlyChild;
}
}
return item;
});
}
_onItemClick(event) {
let $target = $(event.target);
let download = false;
// Handle middle click
if (event.which === 2) return;
// Handle icon click, fix #122
if ($target.is('i.jstree-icon')) {
$target = $target.parent();
download = true;
}
if (!$target.is('a.jstree-anchor')) return;
    // Refocus after completion so that keyboard navigation works, fix #158
const refocusAfterCompletion = () => {
$(document).one('pjax:success page:load', () => {
this.$jstree.get_container().focus();
});
};
const adapter = this.adapter;
const newTab = event.shiftKey || event.ctrlKey || event.metaKey;
const href = $target.attr('href');
// The 2nd path is for submodule child links
const $icon = $target.children().length ? $target.children(':first') : $target.siblings(':first');
if ($icon.hasClass('commit')) {
refocusAfterCompletion();
newTab ? adapter.openInNewTab(href) : adapter.selectSubmodule(href);
} else if ($icon.hasClass('blob')) {
if (download) {
const downloadUrl = $target.attr('data-download-url');
const downloadFileName = $target.attr('data-download-filename');
adapter.downloadFile(downloadUrl, downloadFileName);
} else {
refocusAfterCompletion();
newTab ? adapter.openInNewTab(href) : adapter.selectFile(href);
}
}
}
syncSelection(repo) {
const $jstree = this.$jstree;
if (!$jstree) return;
// Convert /username/reponame/object_type/branch/path to path
const path = decodeURIComponent(location.pathname);
const match = path.match(/(?:[^\/]+\/){4}(.*)/);
if (!match) return;
const currentPath = match[1];
const loadAll = this.adapter.canLoadEntireTree(repo) && this.store.get(STORE.LOADALL);
selectPath(loadAll ? [currentPath] : breakPath(currentPath));
// Convert ['a/b'] to ['a', 'a/b']
function breakPath(fullPath) {
return fullPath.split('/').reduce((res, path, idx) => {
res.push(idx === 0 ? path : `${res[idx - 1]}/${path}`);
return res;
      }, []);
    }
    function selectPath(paths, index = 0) {
      const nodeId = NODE_PREFIX + paths[index];
      if ($jstree.get_node(nodeId)) {
if ($jstree.get_node(nodeId)) {
$jstree.deselect_all();
$jstree.select_node(nodeId);
$jstree.open_node(nodeId, () => {
if (++index < paths.length) {
selectPath(paths, index);
}
});
}
}
}
} | }
function selectPath(paths, index = 0) {
const nodeId = NODE_PREFIX + paths[index]; | random_line_split |
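// Illustrative usage (editor's addition, not part of the original file). The
// concrete `store`/`adapter` objects and the EVENT/STORE constants come from
// the rest of the extension and are assumed here:
//
//     const view = new TreeView($(document.body), store, adapter);
//     $(view).on(EVENT.VIEW_READY, () => console.log('tree rendered'));
//     view.show(repo, token);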
statistic_spec.ts
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AsyncTestCompleter, afterEach, beforeEach, ddescribe, describe, expect, iit, inject, it, xit} from '@angular/core/testing/testing_internal';
import {Statistic} from 'benchpress/src/statistic';
export function main() {
describe('statistic', () => {
it('should calculate the mean', () => {
expect(Statistic.calculateMean([])).toBeNaN();
expect(Statistic.calculateMean([1, 2, 3])).toBe(2.0);
});
it('should calculate the standard deviation', () => {
expect(Statistic.calculateStandardDeviation([], NaN)).toBeNaN();
expect(Statistic.calculateStandardDeviation([1], 1)).toBe(0.0);
expect(Statistic.calculateStandardDeviation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(2.0);
});
it('should calculate the coefficient of variation', () => {
expect(Statistic.calculateCoefficientOfVariation([], NaN)).toBeNaN();
expect(Statistic.calculateCoefficientOfVariation([1], 1)).toBe(0.0);
expect(Statistic.calculateCoefficientOfVariation([2, 4, 4, 4, 5, 5, 7, 9], 5)).toBe(40.0);
});
it('should calculate the regression slope', () => {
expect(Statistic.calculateRegressionSlope([], NaN, [], NaN)).toBeNaN();
expect(Statistic.calculateRegressionSlope([1], 1, [2], 2)).toBeNaN();
expect(Statistic.calculateRegressionSlope([1, 2], 1.5, [2, 4], 3)).toBe(2.0);
});
});
}
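// Editor's note: the expected values above follow the standard definitions.
// For [2, 4, 4, 4, 5, 5, 7, 9] the mean is 5, the population standard
// deviation is sqrt((9 + 1 + 1 + 1 + 0 + 0 + 4 + 16) / 8) = 2, and the
// coefficient of variation is 100 * 2 / 5 = 40.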
edit.state.d.ts
import { Command } from '../command/command';
/**
* Property that controls grid edit unit.
*
* * `'cell'` data is editable through the grid cells.
* * `'row'` data is editable through the grid rows.
 * * `null` data is not editable.
*/
export declare type EditStateMode = null | 'cell' | 'row';
/**
 * Indicates whether the q-grid is in `'edit'` or `'view'` mode.
*/
export declare type EditStateStatus = 'edit' | 'view' | 'startBatch' | 'endBatch';
/**
* Property that controls grid edit behavior.
*
 * * `'batch'` changes are accumulated and committed as a batch update.
*/
export declare type EditStateMethod = null | 'batch';
/**
 * A class representing options that control the q-grid edit mode.
*/
export declare class EditState {
/**
* Property that controls grid edit unit.
*/
mode: EditStateMode;
/**
   * Indicates whether the q-grid is in `'edit'` or `'view'` mode.
*/
status: EditStateStatus;
/**
* Property that controls grid edit behavior.
*/
method: EditStateMethod;
/**
   * Allows the grid user to control whether a cell or row can be edited.
*/
enter: Command;
/**
   * Allows the grid user to control whether a new cell value can be stored in the data source.
*/
commit: Command;
/**
   * Allows the grid user to control whether a cell can exit edit mode.
*/
cancel: Command;
/**
   * Allows the grid user to control whether a cell can exit edit mode.
*/
reset: Command;
/**
   * Allows the grid user to manage the clear action behavior in edit mode.
   */
  clear: Command;
  /**
   * A `{columnKey: keyboardKeys}` map used by q-grid to decide when the
   * cancel command should execute on a key-down event.
*/
cancelShortcuts: { [key: string]: string };
/**
   * A `{columnKey: keyboardKeys}` map used by q-grid to decide when the
   * enter command should execute on a key-down event.
*/
enterShortcuts: { [key: string]: string };
/**
   * A `{columnKey: keyboardKeys}` map used by q-grid to decide when the
   * commit command should execute on a key-down event.
*/
commitShortcuts: { [key: string]: string };
} | clear: Command;
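// Illustrative usage (editor's addition, not part of the original file); how a
// host application reaches the EditState instance is an assumption here:
//
//     const edit: EditState = grid.model.edit(); // hypothetical accessor
//     edit.mode = 'cell';
//     edit.method = 'batch';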
loaders.py
"""
Test cases for the template loaders
Note: This test requires setuptools!
"""
from django.conf import settings
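# Editor's note: this is legacy Python 2 / Django-era test code (the StringIO
# module, old-style load_template_source loaders); it will not run unmodified
# on Python 3.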
if __name__ == '__main__':
settings.configure()
import unittest
import sys
import pkg_resources
import imp
import StringIO
import os.path
from django.template import TemplateDoesNotExist
from django.template.loaders.eggs import load_template_source as lts_egg
# Mock classes and objects for pkg_resources functions.
class MockProvider(pkg_resources.NullProvider):
def __init__(self, module):
pkg_resources.NullProvider.__init__(self, module)
self.module = module
def _has(self, path):
return path in self.module._resources
    def _isdir(self, path):
return False
def get_resource_stream(self, manager, resource_name):
return self.module._resources[resource_name]
def _get(self, path):
return self.module._resources[path].read()
class MockLoader(object):
pass
def create_egg(name, resources):
"""
Creates a mock egg with a list of resources.
name: The name of the module.
resources: A dictionary of resources. Keys are the names and values the data.
"""
egg = imp.new_module(name)
egg.__loader__ = MockLoader()
egg._resources = resources
sys.modules[name] = egg
class EggLoader(unittest.TestCase):
def setUp(self):
pkg_resources._provider_factories[MockLoader] = MockProvider
self.empty_egg = create_egg("egg_empty", {})
self.egg_1 = create_egg("egg_1", {
os.path.normcase('templates/y.html') : StringIO.StringIO("y"),
os.path.normcase('templates/x.txt') : StringIO.StringIO("x"),
})
self._old_installed_apps = settings.INSTALLED_APPS
settings.INSTALLED_APPS = []
def tearDown(self):
settings.INSTALLED_APPS = self._old_installed_apps
def test_empty(self):
"Loading any template on an empty egg should fail"
settings.INSTALLED_APPS = ['egg_empty']
self.assertRaises(TemplateDoesNotExist, lts_egg, "not-existing.html")
def test_non_existing(self):
"Template loading fails if the template is not in the egg"
settings.INSTALLED_APPS = ['egg_1']
self.assertRaises(TemplateDoesNotExist, lts_egg, "not-existing.html")
def test_existing(self):
"A template can be loaded from an egg"
settings.INSTALLED_APPS = ['egg_1']
contents, template_name = lts_egg("y.html")
self.assertEqual(contents, "y")
self.assertEqual(template_name, "egg:egg_1:templates/y.html")
    def test_not_installed(self):
        "Loading an existing template from an egg not included in INSTALLED_APPS should fail"
        settings.INSTALLED_APPS = []
        self.assertRaises(TemplateDoesNotExist, lts_egg, "y.html")
if __name__ == "__main__":
    unittest.main()
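# Illustrative usage (editor's addition, not part of the original file):
# running the module directly executes the suite against the mock eggs built
# above, e.g.
#
#     $ python2 loaders.py
#
# Each test installs a fake egg via create_egg() and then checks how lts_egg()
# resolves templates.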
gui.js
var gui = {};
(function() {
try {
var open = require("open");
var fs = require("fs");
} catch(e) {
var open = function(path) {
window.open("file://"+path);
}
}
var state = document.getElementById("state"),
statemsg = document.getElementById("statemsg"),
progress = document.getElementById("progress"),
pickdir = document.getElementById("pickdir"),
table = document.getElementById("collisions_table").tBodies[0];
pickdir.addEventListener("click", function(){
var fc = document.createElement("input");
fc.type = "file";
fc.value = "";
fc.nwdirectory = true;
fc.multiple = true;
state.classList.remove("hidden");
fc.onchange = function() {
analyze_dir(fc.value);
}
fc.click();
}, true);
gui.update_progress = function(rate) {
progress.value = rate;
};
gui.set_statemsg = function (msg) {
statemsg.innerHTML = msg;
};
gui.analyze_authorized = function (auth) {
pickdir.disabled = auth;
};
    function readableSize (size) {
        if (size > 1e9) return ((size/1e8|0)/10) + " Gb";
        else if (size > 1e6) return ((size/1e5|0)/10) + " Mb";
        else if (size > 1e3) return ((size/1e2|0)/10) + " Kb";
        else return size+" bytes";
    }
function insert_collision (idx, files, dist) {
var row = table.insertRow(idx);
row.dataset["dist"] = dist;
for (var i=0; i<2; i++) {
var cell = row.insertCell(i);
var pathElem = document.createTextNode(files[i].dirname+"/");
var fileNameElem = document.createElement("b");
var sizeElem = document.createElement("i");
var deleteBtn = document.createElement("button");
cell.dataset["filepath"] = files[i].filepath;
fileNameElem.addEventListener("click",function(e) {
var path = e.target.parentElement.dataset["filepath"];
open(path);
}, true);
fileNameElem.textContent = files[i].stats.name;
deleteBtn.textContent = "delete";
deleteBtn.addEventListener("click",function(e) {
var path = e.target.parentElement.dataset["filepath"];
if (confirm("Delete "+path+"?")) {
fs.unlink(path, function (err) {
if (err) {
alert("Unable to delete "+path);
} else {
var row = e.target.parentElement.parentElement;
row.parentElement.removeChild(row);
}
});
}
}, true);
sizeElem.textContent = readableSize(files[i].stats.size);
cell.appendChild(pathElem);
cell.appendChild(fileNameElem);
cell.appendChild(sizeElem);
cell.appendChild(deleteBtn);
}
cell = row.insertCell(2);
cell.textContent = dist;
};
gui.display_collision = function (files, dist) {
        for (var idx=0; idx < table.rows.length; idx++) { // Linear scan for the insertion point; a binary search is probably unnecessary here
if (table.rows[idx].dataset["dist"] >= dist) break;
}
insert_collision(idx, files, dist);
};
gui.init_display_collisions = function() {
table.parentElement.classList.remove("hidden");
table.innerHTML = "";
};
gui.all_collisions_displayed = function (ndoublets) {
gui.set_statemsg(ndoublets + " collisions found");
gui.update_progress(1);
};
})();
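// Illustrative usage (editor's addition, not part of the original file). The
// shape of the `files` objects ({dirname, filepath, stats: {name, size}})
// follows the fields read by insert_collision() above:
//
//     gui.set_statemsg("Scanning...");
//     gui.update_progress(0.5);
//     gui.init_display_collisions();
//     gui.display_collision([fileA, fileB], 3);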
gui.js | var gui={};
(function() {
try {
var open = require("open");
var fs = require("fs");
} catch(e) {
var open = function(path) {
window.open("file://"+path);
}
}
var state = document.getElementById("state"),
statemsg = document.getElementById("statemsg"),
progress = document.getElementById("progress"),
pickdir = document.getElementById("pickdir"),
table = document.getElementById("collisions_table").tBodies[0];
pickdir.addEventListener("click", function(){
var fc = document.createElement("input");
fc.type = "file";
fc.value = "";
fc.nwdirectory = true;
fc.multiple = true;
state.classList.remove("hidden");
fc.onchange = function() {
analyze_dir(fc.value);
}
fc.click();
}, true);
gui.update_progress = function(rate) {
progress.value = rate;
};
gui.set_statemsg = function (msg) {
statemsg.innerHTML = msg;
};
gui.analyze_authorized = function (auth) {
pickdir.disabled = auth;
};
function readableSize (size) {
if (size > 1e9) return ((size/1e8|0)/10) + " Gb";
else if (size > 1e6) return ((size/1e5|0)/10) + " Mb";
else if (size > 1e3) return ((size/1e2|0)/10) + " Kb";
else return size+" bytes";
}
function insert_collision (idx, files, dist) {
var row = table.insertRow(idx);
row.dataset["dist"] = dist;
for (var i=0; i<2; i++) {
var cell = row.insertCell(i);
var pathElem = document.createTextNode(files[i].dirname+"/");
var fileNameElem = document.createElement("b");
var sizeElem = document.createElement("i");
var deleteBtn = document.createElement("button"); | fileNameElem.textContent = files[i].stats.name;
deleteBtn.textContent = "delete";
deleteBtn.addEventListener("click",function(e) {
var path = e.target.parentElement.dataset["filepath"];
if (confirm("Delete "+path+"?")) {
fs.unlink(path, function (err) {
if (err) {
alert("Unable to delete "+path);
} else {
var row = e.target.parentElement.parentElement;
row.parentElement.removeChild(row);
}
});
}
}, true);
sizeElem.textContent = readableSize(files[i].stats.size);
cell.appendChild(pathElem);
cell.appendChild(fileNameElem);
cell.appendChild(sizeElem);
cell.appendChild(deleteBtn);
}
cell = row.insertCell(2);
cell.textContent = dist;
};
gui.display_collision = function (files, dist) {
for (var idx=0; idx < table.rows.length; idx++) { //May not be necessary to do a dichotomy
if (table.rows[idx].dataset["dist"] >= dist) break;
}
insert_collision(idx, files, dist);
};
gui.init_display_collisions = function() {
table.parentElement.classList.remove("hidden");
table.innerHTML = "";
};
gui.all_collisions_displayed = function (ndoublets) {
gui.set_statemsg(ndoublets + " collisions found");
gui.update_progress(1);
};
})(); | cell.dataset["filepath"] = files[i].filepath;
fileNameElem.addEventListener("click",function(e) {
var path = e.target.parentElement.dataset["filepath"];
open(path);
}, true); | random_line_split |
gui.js | var gui={};
(function() {
try {
var open = require("open");
var fs = require("fs");
} catch(e) {
var open = function(path) {
window.open("file://"+path);
}
}
var state = document.getElementById("state"),
statemsg = document.getElementById("statemsg"),
progress = document.getElementById("progress"),
pickdir = document.getElementById("pickdir"),
table = document.getElementById("collisions_table").tBodies[0];
pickdir.addEventListener("click", function(){
var fc = document.createElement("input");
fc.type = "file";
fc.value = "";
fc.nwdirectory = true;
fc.multiple = true;
state.classList.remove("hidden");
fc.onchange = function() {
analyze_dir(fc.value);
}
fc.click();
}, true);
gui.update_progress = function(rate) {
progress.value = rate;
};
gui.set_statemsg = function (msg) {
statemsg.innerHTML = msg;
};
gui.analyze_authorized = function (auth) {
pickdir.disabled = auth;
};
function | (size) {
        if (size > 1e9) return ((size/1e8|0)/10) + " GB";
        else if (size > 1e6) return ((size/1e5|0)/10) + " MB";
        else if (size > 1e3) return ((size/1e2|0)/10) + " KB";
else return size+" bytes";
}
function insert_collision (idx, files, dist) {
var row = table.insertRow(idx);
row.dataset["dist"] = dist;
for (var i=0; i<2; i++) {
var cell = row.insertCell(i);
var pathElem = document.createTextNode(files[i].dirname+"/");
var fileNameElem = document.createElement("b");
var sizeElem = document.createElement("i");
var deleteBtn = document.createElement("button");
cell.dataset["filepath"] = files[i].filepath;
fileNameElem.addEventListener("click",function(e) {
var path = e.target.parentElement.dataset["filepath"];
open(path);
}, true);
fileNameElem.textContent = files[i].stats.name;
deleteBtn.textContent = "delete";
deleteBtn.addEventListener("click",function(e) {
var path = e.target.parentElement.dataset["filepath"];
if (confirm("Delete "+path+"?")) {
fs.unlink(path, function (err) {
if (err) {
alert("Unable to delete "+path);
} else {
var row = e.target.parentElement.parentElement;
row.parentElement.removeChild(row);
}
});
}
}, true);
sizeElem.textContent = readableSize(files[i].stats.size);
cell.appendChild(pathElem);
cell.appendChild(fileNameElem);
cell.appendChild(sizeElem);
cell.appendChild(deleteBtn);
}
cell = row.insertCell(2);
cell.textContent = dist;
};
gui.display_collision = function (files, dist) {
        for (var idx=0; idx < table.rows.length; idx++) { // linear scan; a binary search may not be necessary here
if (table.rows[idx].dataset["dist"] >= dist) break;
}
insert_collision(idx, files, dist);
};
gui.init_display_collisions = function() {
table.parentElement.classList.remove("hidden");
table.innerHTML = "";
};
gui.all_collisions_displayed = function (ndoublets) {
gui.set_statemsg(ndoublets + " collisions found");
gui.update_progress(1);
};
})();
| readableSize | identifier_name |
HumidTemp.js | const sensor = require('node-dht-sensor');
const logger = require('../logging/Logger');
/**
 * Reads a DHT22 sensor on pin 4 of the Raspberry Pi to obtain temperature and humidity information.
* @return {Promise} A promise that will resolve with the results. In the
* case where there was an error reading, will return a zero filled object,
* with an additional error field.
* { temperature: Number,
* humidity: Number,
* error: Error|undefined }
*/
exports.getHumidityTemperature = function() {
return new Promise( (resolve, reject) => {
sensor.read(22, 4, (err, temperature, humidity) => {
if(err) |
resolve({
temperature: temperature * 1.8 + 32,
humidity: humidity});
});
});
}
| {
logger.error("Could not read from the DHT sensor. " + err);
return resolve({
temperature: 0,
humidity: 0,
error: err});
} | conditional_block |
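// Illustrative usage of the module above (the require path is hypothetical;
// the promise resolves with Fahrenheit values because of the *1.8+32
// conversion, and with a zero-filled object plus `error` on a failed read):
const { getHumidityTemperature } = require('./HumidTemp');
getHumidityTemperature().then(({ temperature, humidity, error }) => {
    if (error) return console.warn('sensor read failed:', error);
    console.log(temperature.toFixed(1) + ' F at ' + humidity.toFixed(1) + '% RH');
});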
HumidTemp.js | const sensor = require('node-dht-sensor');
const logger = require('../logging/Logger');
/**
 * Reads a DHT22 sensor on pin 4 of the Raspberry Pi to obtain temperature and humidity information.
* @return {Promise} A promise that will resolve with the results. In the
* case where there was an error reading, will return a zero filled object,
* with an additional error field.
* { temperature: Number,
* humidity: Number,
* error: Error|undefined }
*/
exports.getHumidityTemperature = function() {
return new Promise( (resolve, reject) => {
sensor.read(22, 4, (err, temperature, humidity) => {
if(err) {
logger.error("Could not read from the DHT sensor. " + err);
return resolve({
temperature: 0,
humidity: 0,
error: err});
} | resolve({
temperature: temperature * 1.8 + 32,
humidity: humidity});
});
});
} | random_line_split |
|
issue-17441.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn | () {
let _foo = &[1_usize, 2] as [usize];
//~^ ERROR cast to unsized type: `&[usize; 2]` as `[usize]`
//~^^ HELP consider using an implicit coercion to `&[usize]` instead
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let _bar = Box::new(1_usize) as std::fmt::Debug;
//~^ ERROR cast to unsized type: `Box<usize>` as `core::fmt::Debug`
//~^^ HELP did you mean `Box<core::fmt::Debug>`?
let _baz = 1_usize as std::fmt::Debug;
//~^ ERROR cast to unsized type: `usize` as `core::fmt::Debug`
//~^^ HELP consider using a box or reference as appropriate
let _quux = [1_usize, 2] as [usize];
//~^ ERROR cast to unsized type: `[usize; 2]` as `[usize]`
//~^^ HELP consider using a box or reference as appropriate
}
| main | identifier_name |
issue-17441.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() | {
let _foo = &[1_usize, 2] as [usize];
//~^ ERROR cast to unsized type: `&[usize; 2]` as `[usize]`
//~^^ HELP consider using an implicit coercion to `&[usize]` instead
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let _bar = Box::new(1_usize) as std::fmt::Debug;
//~^ ERROR cast to unsized type: `Box<usize>` as `core::fmt::Debug`
//~^^ HELP did you mean `Box<core::fmt::Debug>`?
let _baz = 1_usize as std::fmt::Debug;
//~^ ERROR cast to unsized type: `usize` as `core::fmt::Debug`
//~^^ HELP consider using a box or reference as appropriate
let _quux = [1_usize, 2] as [usize];
//~^ ERROR cast to unsized type: `[usize; 2]` as `[usize]`
//~^^ HELP consider using a box or reference as appropriate
} | identifier_body |
|
issue-17441.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
// |
fn main() {
let _foo = &[1_usize, 2] as [usize];
//~^ ERROR cast to unsized type: `&[usize; 2]` as `[usize]`
//~^^ HELP consider using an implicit coercion to `&[usize]` instead
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let _bar = Box::new(1_usize) as std::fmt::Debug;
//~^ ERROR cast to unsized type: `Box<usize>` as `core::fmt::Debug`
//~^^ HELP did you mean `Box<core::fmt::Debug>`?
let _baz = 1_usize as std::fmt::Debug;
//~^ ERROR cast to unsized type: `usize` as `core::fmt::Debug`
//~^^ HELP consider using a box or reference as appropriate
let _quux = [1_usize, 2] as [usize];
//~^ ERROR cast to unsized type: `[usize; 2]` as `[usize]`
//~^^ HELP consider using a box or reference as appropriate
} | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms. | random_line_split |
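// For contrast, the coercions the HELP notes above point at look like this
// (an illustrative sketch, not part of the test file):
//
//     let _foo = &[1_usize, 2] as &[usize];                  // implicit unsized coercion
//     let _bar = Box::new(1_usize) as Box<std::fmt::Debug>;  // boxed trait object
//     let _baz = &1_usize as &std::fmt::Debug;               // reference as appropriate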
main.ts | class | extends Rf.ETS.FrameWork.GameMain
{
private group: Rf.ETS.FrameWork.Group = null;
private sprite: Rf.ETS.FrameWork.Sprite = null;
private touchCharactor: Rf.ETS.FrameWork.Character = null;
private touchCharactorTouchPosX:number = 0;
private touchCharactorTouchPosY:number = 0;
/**
     * Initialization event
* @method
* @name FrameWork.GameMain#onInitialize
*/
protected onInitialize():void
{
        // Change the size to 640x640
        this.screenHeight = 640;
        // Raise the fps from 10 to 30
this.fps = 30;
}
/**
     * Resource setting event
* @method
* @name FrameWork.GameMain#resourceLoad
*/
protected onResourceSetting():void
{
this.resourceManager.SetResourcePath("./assets/resources/");
this.resourceManager.AddResourceName("charaImage", "chara.png");
}
/**
     * Load event
     * @method
     * @name FrameWork.GameMain#onLoad
     * @param {Object} parent - parent Group
*/
protected onLoad(parent: enchant.Group):void
{
        // Create the group instance
this.group = new Rf.ETS.FrameWork.Group(parent);
this.group.y = 100;
        // Create an imageless sprite instance
let surface: Rf.ETS.FrameWork.NoImageSprite =
new Rf.ETS.FrameWork.NoImageSprite(100, 100, this.group);
surface.SetSurface("rgb(128,255,255)");
surface.opacity = 0.5;
        // Create the sprite instance
this.sprite = new Rf.ETS.FrameWork.Sprite(32, 32, this.group);
this.sprite.FileName = this.resourceManager.GetResourceName("charaImage");
        this.sprite.originX = 16;   // set to rotate around the center
        this.sprite.originY = 16;   // set to rotate around the center
        this.sprite.frame = 26*2;   // show the front-facing frame of the sample image
        // Character for touch events
this.touchCharactor = new Rf.ETS.FrameWork.Character(32,32,parent);
this.touchCharactor.FileName = this.resourceManager.GetResourceName("charaImage");
this.touchCharactor.charaIndex = 3;
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Up;
this.touchCharactor.x = 32;
this.touchCharactor.y = 32;
this.touchCharactor.originX = 16*2;
this.touchCharactor.originY = 16*2;
this.touchCharactor.scale(2.0,2.0);
this.touchCharactor.maxWaitCount = 6;
this.touchCharactor.addEventListener(enchant.Event.TOUCH_START,(e:enchant.Event)=>{
            // On touch start, face forward and suspend the animation
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Down;
this.touchCharactor.SuspendAnime();
this.touchCharactorTouchPosX = this.touchCharactor.x - e.x;
this.touchCharactorTouchPosY = this.touchCharactor.y - e.y;
});
this.touchCharactor.addEventListener(enchant.Event.TOUCH_MOVE,(e:enchant.Event)=>{
            // While touching, move the character to the touch position
this.touchCharactor.x = e.x + this.touchCharactorTouchPosX;
this.touchCharactor.y = e.y + this.touchCharactorTouchPosY;
});
this.touchCharactor.addEventListener(enchant.Event.TOUCH_END,(e:enchant.Event)=>{
            // On touch end, face away again and resume the animation
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Up;
this.touchCharactor.ResumeAnime();
});
}
/**
     * Run event
* @method
* @name FrameWork.GameMain#onRun
*/
protected onRun():void
{
        // Move the group to the right
this.group.x += 2;
if (this.group.x >= 200) {
this.group.x = 0;
}
        // Rotate the sprite inside the group
this.sprite.rotation += 5;
if (this.sprite.rotation >= 360) {
this.sprite.rotation = 0;
}
        // Run the touch-event character's animation
this.touchCharactor.Run();
}
}
// Create an instance of the main class
createMain(GameMain); | GameMain | identifier_name |
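// A note on the drag math in onLoad above: touchCharactorTouchPosX/Y store
// the offset between the character and the first touch, so TOUCH_MOVE keeps
// the grab point fixed. E.g. grabbing at (40, 50) while the character sits at
// (32, 32) stores (-8, -18); a later touch at (100, 100) then places the
// character at (92, 82) instead of snapping its origin to the finger.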
main.ts | class GameMain extends Rf.ETS.FrameWork.GameMain
{
private group: Rf.ETS.FrameWork.Group = null;
private sprite: Rf.ETS.FrameWork.Sprite = null;
private touchCharactor: Rf.ETS.FrameWork.Character = null;
private touchCharactorTouchPosX:number = 0;
private touchCharactorTouchPosY:number = 0;
/**
     * Initialization event
* @method
* @name FrameWork.GameMain#onInitialize
*/
protected onInitialize():void
{
        // Change the size to 640x640
        this.screenHeight = 640;
        // Raise the fps from 10 to 30
this.fps = 30;
}
/**
     * Resource setting event
* @method
* @name FrameWork.GameMain#resourceLoad
*/
protected onResourceSetting():void
{
this.resourceManager.SetResourcePath("./assets/resources/");
this.resourceManager.AddResourceName("charaImage", "chara.png");
}
/**
     * Load event
     * @method
     * @name FrameWork.GameMain#onLoad
     * @param {Object} parent - parent Group
*/
protected onLoad(parent: enchant.Group):void
{
        // Create the group instance
this.group = new Rf.ETS.FrameWork.Group(parent);
this.group.y = 100;
        // Create an imageless sprite instance
let surface: Rf.ETS.FrameWork.NoImageSprite =
new Rf.ETS.FrameWork.NoImageSprite(100, 100, this.group);
surface.SetSurface("rgb(128,255,255)");
surface.opacity = 0.5;
        // Create the sprite instance
this.sprite = new Rf.ETS.FrameWork.Sprite(32, 32, this.group);
this.sprite.FileName = this.resourceManager.GetResourceName("charaImage");
        this.sprite.originX = 16;   // set to rotate around the center
        this.sprite.originY = 16;   // set to rotate around the center
        this.sprite.frame = 26*2;   // show the front-facing frame of the sample image
        // Character for touch events
| this.touchCharactor.y = 32;
this.touchCharactor.originX = 16*2;
this.touchCharactor.originY = 16*2;
this.touchCharactor.scale(2.0,2.0);
this.touchCharactor.maxWaitCount = 6;
this.touchCharactor.addEventListener(enchant.Event.TOUCH_START,(e:enchant.Event)=>{
            // On touch start, face forward and suspend the animation
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Down;
this.touchCharactor.SuspendAnime();
this.touchCharactorTouchPosX = this.touchCharactor.x - e.x;
this.touchCharactorTouchPosY = this.touchCharactor.y - e.y;
});
this.touchCharactor.addEventListener(enchant.Event.TOUCH_MOVE,(e:enchant.Event)=>{
            // While touching, move the character to the touch position
this.touchCharactor.x = e.x + this.touchCharactorTouchPosX;
this.touchCharactor.y = e.y + this.touchCharactorTouchPosY;
});
this.touchCharactor.addEventListener(enchant.Event.TOUCH_END,(e:enchant.Event)=>{
            // On touch end, face away again and resume the animation
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Up;
this.touchCharactor.ResumeAnime();
});
}
/**
     * Run event
* @method
* @name FrameWork.GameMain#onRun
*/
protected onRun():void
{
        // Move the group to the right
this.group.x += 2;
if (this.group.x >= 200) {
this.group.x = 0;
}
        // Rotate the sprite inside the group
this.sprite.rotation += 5;
if (this.sprite.rotation >= 360) {
this.sprite.rotation = 0;
}
        // Run the touch-event character's animation
this.touchCharactor.Run();
}
}
// Create an instance of the main class
createMain(GameMain); | this.touchCharactor = new Rf.ETS.FrameWork.Character(32,32,parent);
this.touchCharactor.FileName = this.resourceManager.GetResourceName("charaImage");
this.touchCharactor.charaIndex = 3;
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Up;
this.touchCharactor.x = 32;
| random_line_split |
main.ts | class GameMain extends Rf.ETS.FrameWork.GameMain
{
private group: Rf.ETS.FrameWork.Group = null;
private sprite: Rf.ETS.FrameWork.Sprite = null;
private touchCharactor: Rf.ETS.FrameWork.Character = null;
private touchCharactorTouchPosX:number = 0;
private touchCharactorTouchPosY:number = 0;
/**
     * Initialization event
* @method
* @name FrameWork.GameMain#onInitialize
*/
protected onInitialize():void
{
        // Change the size to 640x640
        this.screenHeight = 640;
        // Raise the fps from 10 to 30
this.fps = 30;
}
/**
     * Resource setting event
* @method
* @name FrameWork.GameMain#resourceLoad
*/
protected onResourceSetting():void
{
this.resourceManager.SetResourcePath("./assets/resources/");
this.resourceManager.AddResourceName("charaImage", "chara.png");
}
/**
     * Load event
     * @method
     * @name FrameWork.GameMain#onLoad
     * @param {Object} parent - parent Group
*/
protected onLoad(parent: enchant.Group):void
{
        // Create the group instance
this.group = new Rf.ETS.FrameWork.Group(parent);
this.group.y = 100;
        // Create an imageless sprite instance
let surface: Rf.ETS.FrameWork.NoImageSprite =
new Rf.ETS.FrameWork.NoImageSprite(100, 100, this.group);
surface.SetSurface("rgb(128,255,255)");
surface.opacity = 0.5;
        // Create the sprite instance
this.sprite = new Rf.ETS.FrameWork.Sprite(32, 32, this.group);
this.sprite.FileName = this.resourceManager.GetResourceName("charaImage");
        this.sprite.originX = 16;   // set to rotate around the center
        this.sprite.originY = 16;   // set to rotate around the center
        this.sprite.frame = 26*2;   // show the front-facing frame of the sample image
        // Character for touch events
this.touchCharactor = new Rf.ETS.FrameWork.Character(32,32,parent);
this.touchCharactor.FileName = this.resourceManager.GetResourceName("charaImage");
this.touchCharactor.charaIndex = 3;
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Up;
this.touchCharactor.x = 32;
this.touchCharactor.y = 32;
this.touchCharactor.originX = 16*2;
this.touchCharactor.originY = 16*2;
this.touchCharactor.scale(2.0,2.0);
this.touchCharactor.maxWaitCount = 6;
this.touchCharactor.addEventListener(enchant.Event.TOUCH_START,(e:enchant.Event)=>{
            // On touch start, face forward and suspend the animation
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Down;
this.touchCharactor.SuspendAnime();
this.touchCharactorTouchPosX = this.touchCharactor.x - e.x;
this.touchCharactorTouchPosY = this.touchCharactor.y - e.y;
});
this.touchCharactor.addEventListener(enchant.Event.TOUCH_MOVE,(e:enchant.Event)=>{
            // While touching, move the character to the touch position
this.touchCharactor.x = e.x + this.touchCharactorTouchPosX;
this.touchCharactor.y = e.y + this.touchCharactorTouchPosY;
});
this.touchCharactor.addEventListener(enchant.Event.TOUCH_END,(e:enchant.Event)=>{
            // On touch end, face away again and resume the animation
this.touchCharactor.Dir = Rf.ETS.FrameWork.Direction.Up;
this.touchCharactor.ResumeAnime();
});
}
/**
     * Run event
* @method
* @name FrameWork.GameMain#onRun
*/
protected onRun():void
{
        // Move the group to the right
this.group.x += 2;
if (this.group.x >= 200) {
this.group.x = 0;
}
        // Rotate the sprite inside the group
this.sprite.rotation += 5;
if (this.sprite.rotation >= 360) {
this.sprite.rotation = 0;
}
        // Run the touch-event character's animation
this.touchCharactor.Run();
}
}
// Create an instance of the main class
createMain(GameMain); | conditional_block |
||
test_markup.py | # Check translations of pango markup
#
# This will look for translatable strings that appear to contain markup and
# check that the markup in the translation matches.
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Shea <[email protected]>
try:
import polib
except ImportError:
print("You need to install the python-polib package to read translations")
raise
from pocketlint.pangocheck import is_markup, markup_match
import xml.etree.ElementTree as ET
def test_markup(mofile):
mo = polib.mofile(mofile)
for entry in mo.translated_entries():
if is_markup(entry.msgid):
# If this is a plural, check each of the plural translations
if entry.msgid_plural:
xlations = entry.msgstr_plural
else:
xlations = {None: entry.msgstr}
| for plural_id, msgstr in xlations.items():
# Check if the markup is valid at all
try:
# pylint: disable=unescaped-markup
ET.fromstring('<markup>%s</markup>' % msgstr)
except ET.ParseError:
if entry.msgid_plural:
raise AssertionError("Invalid markup translation for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Invalid markup translation for msgid %s" % entry.msgid)
# Check if the markup has the same number and kind of tags
if not markup_match(entry.msgid, msgstr):
if entry.msgid_plural:
raise AssertionError("Markup does not match for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Markup does not match for msgid %s" % entry.msgid) | random_line_split |
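# A minimal way to exercise test_markup() above (illustrative; the module
# path and .mo file are hypothetical, and python-polib must be installed):
#
#     from test_markup import test_markup
#     test_markup("po/de.mo")   # raises AssertionError on mismatched markup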
|
test_markup.py | # Check translations of pango markup
#
# This will look for translatable strings that appear to contain markup and
# check that the markup in the translation matches.
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Shea <[email protected]>
try:
import polib
except ImportError:
print("You need to install the python-polib package to read translations")
raise
from pocketlint.pangocheck import is_markup, markup_match
import xml.etree.ElementTree as ET
def | (mofile):
mo = polib.mofile(mofile)
for entry in mo.translated_entries():
if is_markup(entry.msgid):
# If this is a plural, check each of the plural translations
if entry.msgid_plural:
xlations = entry.msgstr_plural
else:
xlations = {None: entry.msgstr}
for plural_id, msgstr in xlations.items():
# Check if the markup is valid at all
try:
# pylint: disable=unescaped-markup
ET.fromstring('<markup>%s</markup>' % msgstr)
except ET.ParseError:
if entry.msgid_plural:
raise AssertionError("Invalid markup translation for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Invalid markup translation for msgid %s" % entry.msgid)
# Check if the markup has the same number and kind of tags
if not markup_match(entry.msgid, msgstr):
if entry.msgid_plural:
raise AssertionError("Markup does not match for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Markup does not match for msgid %s" % entry.msgid)
| test_markup | identifier_name |
test_markup.py | # Check translations of pango markup
#
# This will look for translatable strings that appear to contain markup and
# check that the markup in the translation matches.
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Shea <[email protected]>
try:
import polib
except ImportError:
print("You need to install the python-polib package to read translations")
raise
from pocketlint.pangocheck import is_markup, markup_match
import xml.etree.ElementTree as ET
def test_markup(mofile):
mo = polib.mofile(mofile)
for entry in mo.translated_entries():
if is_markup(entry.msgid):
# If this is a plural, check each of the plural translations
if entry.msgid_plural:
xlations = entry.msgstr_plural
else:
xlations = {None: entry.msgstr}
for plural_id, msgstr in xlations.items():
# Check if the markup is valid at all
try:
# pylint: disable=unescaped-markup
ET.fromstring('<markup>%s</markup>' % msgstr)
except ET.ParseError:
if entry.msgid_plural:
raise AssertionError("Invalid markup translation for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Invalid markup translation for msgid %s" % entry.msgid)
# Check if the markup has the same number and kind of tags
if not markup_match(entry.msgid, msgstr):
if entry.msgid_plural:
raise AssertionError("Markup does not match for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
| raise AssertionError("Markup does not match for msgid %s" % entry.msgid) | conditional_block |
|
test_markup.py | # Check translations of pango markup
#
# This will look for translatable strings that appear to contain markup and
# check that the markup in the translation matches.
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Shea <[email protected]>
try:
import polib
except ImportError:
print("You need to install the python-polib package to read translations")
raise
from pocketlint.pangocheck import is_markup, markup_match
import xml.etree.ElementTree as ET
def test_markup(mofile):
| mo = polib.mofile(mofile)
for entry in mo.translated_entries():
if is_markup(entry.msgid):
# If this is a plural, check each of the plural translations
if entry.msgid_plural:
xlations = entry.msgstr_plural
else:
xlations = {None: entry.msgstr}
for plural_id, msgstr in xlations.items():
# Check if the markup is valid at all
try:
# pylint: disable=unescaped-markup
ET.fromstring('<markup>%s</markup>' % msgstr)
except ET.ParseError:
if entry.msgid_plural:
raise AssertionError("Invalid markup translation for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Invalid markup translation for msgid %s" % entry.msgid)
# Check if the markup has the same number and kind of tags
if not markup_match(entry.msgid, msgstr):
if entry.msgid_plural:
raise AssertionError("Markup does not match for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Markup does not match for msgid %s" % entry.msgid) | identifier_body |
|
index.d.ts | // Type definitions for animejs 2.0
// Project: http://animejs.com
// Definitions by: Andrew Babin <https://github.com/A-Babin>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.4
type FunctionBasedParamter = (element: HTMLElement, index: number, length: number) => number;
type AnimeCallbackFunction = (anim: anime.AnimeInstance) => void;
// Allowing null is necessary because DOM queries may not return anything.
type AnimeTarget = string | object | HTMLElement | SVGElement | NodeList | null;
declare namespace anime {
type EasingOptions =
| "linear"
| "easeInQuad"
| "easeInCubic"
| "easeInQuart"
| "easeInQuint"
| "easeInSine"
| "easeInExpo"
| "easeInCirc"
| "easeInBack"
| "easeInElastic"
| "easeOutQuad"
| "easeOutCubic"
| "easeOutQuart"
| "easeOutQuint"
| "easeOutSine"
| "easeOutExpo"
| "easeOutCirc"
| "easeOutBack"
| "easeOutElastic"
| "easeInOutQuad"
| "easeInOutCubic"
| "easeInOutQuart"
| "easeInOutQuint"
| "easeInOutSine"
| "easeInOutExpo"
| "easeInOutCirc"
| "easeInOutBack"
| "easeInOutElastic";
type DirectionOptions = "reverse" | "alternate" | "normal";
interface AnimeInstanceParams {
loop?: number | boolean;
autoplay?: boolean;
direction?: DirectionOptions | string;
begin?: AnimeCallbackFunction;
run?: AnimeCallbackFunction;
update?: AnimeCallbackFunction;
complete?: AnimeCallbackFunction;
}
interface AnimeAnimParams {
targets: AnimeTarget | ReadonlyArray<AnimeTarget>;
duration?: number | FunctionBasedParamter;
delay?: number | FunctionBasedParamter;
elasticity?: number | FunctionBasedParamter;
round?: number | boolean | FunctionBasedParamter;
easing?: EasingOptions | string | ReadonlyArray<number>;
begin?: AnimeCallbackFunction;
run?: AnimeCallbackFunction;
update?: AnimeCallbackFunction;
complete?: AnimeCallbackFunction;
[AnyAnimatedProperty: string]: any;
}
interface AnimeParams extends AnimeInstanceParams, AnimeAnimParams {
// Just need this to merge both Params interfaces.
}
interface AnimeInstance {
play(): void;
pause(): void;
restart(): void;
reverse(): void;
seek(time: number): void;
began: boolean;
paused: boolean;
completed: boolean;
finished: Promise<void>;
begin: AnimeCallbackFunction;
run: AnimeCallbackFunction;
update: AnimeCallbackFunction;
complete: AnimeCallbackFunction;
autoplay: boolean;
currentTime: number;
delay: number;
direction: string;
duration: number;
loop: number | boolean;
offset: number;
progress: number;
remaining: number;
reversed: boolean;
animatables: ReadonlyArray<object>;
animations: ReadonlyArray<object>;
}
interface AnimeTimelineAnimParams extends AnimeAnimParams {
offset: number | string | FunctionBasedParamter;
}
interface AnimeTimelineInstance extends AnimeInstance {
add(params: AnimeAnimParams): AnimeTimelineInstance;
}
// Helpers
const speed: number;
const running: AnimeInstance[];
const easings: { [EasingFunction: string]: (t: number) => any };
function remove(targets: AnimeTarget | ReadonlyArray<AnimeTarget>): void;
function getValue(targets: AnimeTarget, prop: string): string | number;
function path(path: string | HTMLElement | SVGElement | null, percent?: number): (prop: string) => {
el: HTMLElement | SVGElement,
property: string,
totalLength: number
};
function setDashoffset(el: HTMLElement | SVGElement | null): number;
function bezier(x1: number, y1: number, x2: number, y2: number): (t: number) => number;
// Timeline | function random(min: number, max: number): number;
}
declare function anime(params: anime.AnimeParams): anime.AnimeInstance;
export = anime;
export as namespace anime; | function timeline(params?: AnimeInstanceParams | ReadonlyArray<AnimeInstance>): AnimeTimelineInstance; | random_line_split |
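// An illustrative consumer of the typings above (assumes anime.js v2 is
// installed; the selector and animated property are just examples):
//
//     import anime = require("animejs");
//     const a: anime.AnimeInstance = anime({
//         targets: ".box",
//         translateX: 250,          // accepted via the index signature
//         duration: 800,
//         easing: "easeInOutQuad",
//     });
//     a.finished.then(() => console.log("done"));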
run_CNN_SAT.py | # Copyright 2015 Tianchuan Du University of Delaware
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABILITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
import cPickle
import gzip
import numpy
import os
import sys
import theano
from theano.tensor.shared_randomstreams import RandomStreams
import time
from io_func.model_io import _nnet2file, _file2nnet, _cfg2file, log
from learning.sgd import train_sgd_verbose, validate_by_minibatch_verbose
from models.cnn_sat import CNN_SAT
import theano.tensor as T
from utils.network_config import NetworkConfig
from utils.utils import parse_arguments
# Implements the Speaker Adaptive Training of DNNs proposed in the following papers:
# [1] Yajie Miao, Hao Zhang, Florian Metze. "Towards Speaker Adaptive Training of Deep
# Neural Network Acoustic Models". Interspeech 2014.
# [2] Yajie Miao, Lu Jiang, Hao Zhang, Florian Metze. "Improvements to Speaker Adaptive
# Training of Deep Neural Networks". SLT 2014.
if __name__ == '__main__':
# check the arguments
arg_elements = [sys.argv[i] for i in range(1, len(sys.argv))]
arguments = parse_arguments(arg_elements)
required_arguments = ['train_data', 'valid_data', 'si_nnet_spec', 'si_conv_nnet_spec', 'wdir', 'adapt_nnet_spec', 'init_model']
for arg in required_arguments:
if arguments.has_key(arg) == False:
print "Error: the argument %s has to be specified" % (arg); exit(1)
# mandatory arguments
train_data_spec = arguments['train_data']; valid_data_spec = arguments['valid_data']
si_nnet_spec = arguments['si_nnet_spec']
si_conv_nnet_spec = arguments['si_conv_nnet_spec']
adapt_nnet_spec = arguments['adapt_nnet_spec'];
wdir = arguments['wdir']
init_model_file = arguments['init_model']
# parse network configuration from arguments, and initialize data reading
cfg_si = NetworkConfig(); cfg_si.model_type = 'CNN'
cfg_si.parse_config_cnn(arguments, '10:' + si_nnet_spec, si_conv_nnet_spec)
cfg_si.init_data_reading(train_data_spec, valid_data_spec)
# parse the structure of the i-vector network
cfg_adapt = NetworkConfig()
net_split = adapt_nnet_spec.split(':')
adapt_nnet_spec = ''
for n in xrange(len(net_split) - 1):
adapt_nnet_spec += net_split[n] + ':'
cfg_adapt.parse_config_dnn(arguments, adapt_nnet_spec + '0')
numpy_rng = numpy.random.RandomState(89677)
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
log('> ... initializing the model')
# setup up the model
dnn = CNN_SAT(numpy_rng=numpy_rng, theano_rng = theano_rng, cfg_si = cfg_si, cfg_adapt = cfg_adapt)
# read the initial DNN (the SI DNN which has been well trained)
# _file2nnet(dnn.cnn_si.layers, filename = init_model_file)
_file2nnet(dnn.cnn_si.layers, filename = 'BKUP/nnet.param.si')
_file2nnet(dnn.dnn_adapt.layers, filename = 'BKUP/nnet.param.adapt')
# get the training and validation functions for adaptation network training
dnn.params = dnn.dnn_adapt.params # only update the parameters of the i-vector nnet
dnn.delta_params = dnn.dnn_adapt.delta_params
log('> ... getting the finetuning functions for iVecNN')
train_fn, valid_fn = dnn.build_finetune_functions(
(cfg_si.train_x, cfg_si.train_y), (cfg_si.valid_x, cfg_si.valid_y),
batch_size = cfg_adapt.batch_size)
log('> ... learning the adaptation network')
cfg = cfg_adapt
while (cfg.lrate.get_rate() != 0):
# one epoch of sgd training
# train_error = train_sgd_verbose(train_fn, cfg_si.train_sets, cfg_si.train_xy,
# cfg.batch_size, cfg.lrate.get_rate(), cfg.momentum)
# log('> epoch %d, training error %f ' % (cfg.lrate.epoch, 100*numpy.mean(train_error)) + '(%)')
# validation
|
# save the model and network configuration
if cfg.param_output_file != '':
_nnet2file(dnn.dnn_adapt.layers, filename = cfg.param_output_file + '.adapt',
input_factor = cfg_adapt.input_dropout_factor, factor = cfg_adapt.dropout_factor)
_nnet2file(dnn.cnn_si.layers, filename = cfg.param_output_file + '.si',
input_factor = cfg_si.input_dropout_factor, factor = cfg_si.dropout_factor)
log('> ... the final PDNN model parameter is ' + cfg.param_output_file + ' (.si, .adapt)')
if cfg.cfg_output_file != '':
_cfg2file(cfg_adapt, filename=cfg.cfg_output_file + '.adapt')
_cfg2file(cfg_si, filename=cfg.cfg_output_file + '.si')
log('> ... the final PDNN model config is ' + cfg.cfg_output_file + ' (.si, .adapt)')
# output the model into Kaldi-compatible format
if cfg.kaldi_output_file != '':
dnn.cnn_si.fc_dnn.write_model_to_kaldi(cfg.kaldi_output_file + '.si')
dnn.dnn_adapt.write_model_to_kaldi(cfg.kaldi_output_file + '.adapt', with_softmax = False)
log('> ... the final Kaldi model is ' + cfg.kaldi_output_file + ' (.si, .adapt)')
| valid_error = validate_by_minibatch_verbose(valid_fn, cfg_si.valid_sets, cfg_si.valid_xy, cfg.batch_size)
log('> epoch %d, lrate %f, validation error %f ' % (cfg.lrate.epoch, cfg.lrate.get_rate(), 100*numpy.mean(valid_error)) + '(%)')
cfg.lrate.get_next_rate(current_error = 100 * numpy.mean(valid_error))
cfg.lrate.rate = 0 | conditional_block |
run_CNN_SAT.py | # Copyright 2015 Tianchuan Du University of Delaware
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABILITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
import cPickle
import gzip
import numpy
import os
import sys
import theano
from theano.tensor.shared_randomstreams import RandomStreams
import time
from io_func.model_io import _nnet2file, _file2nnet, _cfg2file, log
from learning.sgd import train_sgd_verbose, validate_by_minibatch_verbose
from models.cnn_sat import CNN_SAT
import theano.tensor as T
from utils.network_config import NetworkConfig |
# Implements the Speaker Adaptive Training of DNNs proposed in the following papers:
# [1] Yajie Miao, Hao Zhang, Florian Metze. "Towards Speaker Adaptive Training of Deep
# Neural Network Acoustic Models". Interspeech 2014.
# [2] Yajie Miao, Lu Jiang, Hao Zhang, Florian Metze. "Improvements to Speaker Adaptive
# Training of Deep Neural Networks". SLT 2014.
if __name__ == '__main__':
# check the arguments
arg_elements = [sys.argv[i] for i in range(1, len(sys.argv))]
arguments = parse_arguments(arg_elements)
required_arguments = ['train_data', 'valid_data', 'si_nnet_spec', 'si_conv_nnet_spec', 'wdir', 'adapt_nnet_spec', 'init_model']
for arg in required_arguments:
if arguments.has_key(arg) == False:
print "Error: the argument %s has to be specified" % (arg); exit(1)
# mandatory arguments
train_data_spec = arguments['train_data']; valid_data_spec = arguments['valid_data']
si_nnet_spec = arguments['si_nnet_spec']
si_conv_nnet_spec = arguments['si_conv_nnet_spec']
adapt_nnet_spec = arguments['adapt_nnet_spec'];
wdir = arguments['wdir']
init_model_file = arguments['init_model']
# parse network configuration from arguments, and initialize data reading
cfg_si = NetworkConfig(); cfg_si.model_type = 'CNN'
cfg_si.parse_config_cnn(arguments, '10:' + si_nnet_spec, si_conv_nnet_spec)
cfg_si.init_data_reading(train_data_spec, valid_data_spec)
# parse the structure of the i-vector network
cfg_adapt = NetworkConfig()
net_split = adapt_nnet_spec.split(':')
adapt_nnet_spec = ''
for n in xrange(len(net_split) - 1):
adapt_nnet_spec += net_split[n] + ':'
cfg_adapt.parse_config_dnn(arguments, adapt_nnet_spec + '0')
numpy_rng = numpy.random.RandomState(89677)
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
log('> ... initializing the model')
# setup up the model
dnn = CNN_SAT(numpy_rng=numpy_rng, theano_rng = theano_rng, cfg_si = cfg_si, cfg_adapt = cfg_adapt)
# read the initial DNN (the SI DNN which has been well trained)
# _file2nnet(dnn.cnn_si.layers, filename = init_model_file)
_file2nnet(dnn.cnn_si.layers, filename = 'BKUP/nnet.param.si')
_file2nnet(dnn.dnn_adapt.layers, filename = 'BKUP/nnet.param.adapt')
# get the training and validation functions for adaptation network training
dnn.params = dnn.dnn_adapt.params # only update the parameters of the i-vector nnet
dnn.delta_params = dnn.dnn_adapt.delta_params
log('> ... getting the finetuning functions for iVecNN')
train_fn, valid_fn = dnn.build_finetune_functions(
(cfg_si.train_x, cfg_si.train_y), (cfg_si.valid_x, cfg_si.valid_y),
batch_size = cfg_adapt.batch_size)
log('> ... learning the adaptation network')
cfg = cfg_adapt
while (cfg.lrate.get_rate() != 0):
# one epoch of sgd training
# train_error = train_sgd_verbose(train_fn, cfg_si.train_sets, cfg_si.train_xy,
# cfg.batch_size, cfg.lrate.get_rate(), cfg.momentum)
# log('> epoch %d, training error %f ' % (cfg.lrate.epoch, 100*numpy.mean(train_error)) + '(%)')
# validation
valid_error = validate_by_minibatch_verbose(valid_fn, cfg_si.valid_sets, cfg_si.valid_xy, cfg.batch_size)
log('> epoch %d, lrate %f, validation error %f ' % (cfg.lrate.epoch, cfg.lrate.get_rate(), 100*numpy.mean(valid_error)) + '(%)')
cfg.lrate.get_next_rate(current_error = 100 * numpy.mean(valid_error))
cfg.lrate.rate = 0
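        # NOTE: with the SGD call above commented out, forcing the rate to 0
        # here makes the while loop exit after a single validation pass; this
        # looks like a debugging shortcut rather than the intended schedule.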
# save the model and network configuration
if cfg.param_output_file != '':
_nnet2file(dnn.dnn_adapt.layers, filename = cfg.param_output_file + '.adapt',
input_factor = cfg_adapt.input_dropout_factor, factor = cfg_adapt.dropout_factor)
_nnet2file(dnn.cnn_si.layers, filename = cfg.param_output_file + '.si',
input_factor = cfg_si.input_dropout_factor, factor = cfg_si.dropout_factor)
log('> ... the final PDNN model parameter is ' + cfg.param_output_file + ' (.si, .adapt)')
if cfg.cfg_output_file != '':
_cfg2file(cfg_adapt, filename=cfg.cfg_output_file + '.adapt')
_cfg2file(cfg_si, filename=cfg.cfg_output_file + '.si')
log('> ... the final PDNN model config is ' + cfg.cfg_output_file + ' (.si, .adapt)')
# output the model into Kaldi-compatible format
if cfg.kaldi_output_file != '':
dnn.cnn_si.fc_dnn.write_model_to_kaldi(cfg.kaldi_output_file + '.si')
dnn.dnn_adapt.write_model_to_kaldi(cfg.kaldi_output_file + '.adapt', with_softmax = False)
log('> ... the final Kaldi model is ' + cfg.kaldi_output_file + ' (.si, .adapt)') | from utils.utils import parse_arguments
| random_line_split |
colorbeams.py | """
Color beams pattern
"""
from .pattern import Pattern
import colorsys
import time
class ColorBeams(Pattern):
@staticmethod
def getHue(hue):
hsv = colorsys.hsv_to_rgb(hue, 1, 1)
return int(hsv[0] * 255), int(hsv[1] * 255), int(hsv[2] * 255)
@staticmethod
def highlight(strip, i, hue = 0.5):
i = i % len(strip)
# set the color of this pixel
strip[i] = ColorBeams.getHue(hue)
for x in range(15):
index = (i - x) % len(strip)
decay = pow(0.7, x)
# strip[index] = (int(strip[index][0] * decay), int(strip[index][1] * decay), int(strip[index][2] * decay))
strip[index] = (int(strip[i][0] * decay), int(strip[i][1] * decay), int(strip[i][2] * decay)) |
def __init__(self):
pass
@classmethod
def get_id(self):
return 11
@classmethod
def update(self, strip, state):
# use the time to determine the offset
t = ColorBeams.__get_time()
offset = int(((t % state.delay) / state.delay) * len(strip))
for y in range(0, len(strip), 50):
ColorBeams.highlight(strip, offset + y, (5 * y / len(strip)) % 1) |
@staticmethod
def __get_time():
return time.time() * 1000 | random_line_split |
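# A quick check of the trail math used in highlight() above: a pixel x steps
# behind the beam head is dimmed by 0.7**x, so the 15-pixel tail fades below
# 1% of head brightness after about 13 pixels (0.7**13 ~= 0.0097). Illustrative:
decay_at = lambda x: 0.7 ** x
assert decay_at(0) == 1.0 and decay_at(13) < 0.01
# (Note: the hue expression (5 * y / len(strip)) % 1 in update() assumes
# Python 3 true division; under Python 2 integer division it would floor to 0.)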
colorbeams.py | """
Color beams pattern
"""
from .pattern import Pattern
import colorsys
import time
class ColorBeams(Pattern):
@staticmethod
def | (hue):
hsv = colorsys.hsv_to_rgb(hue, 1, 1)
return int(hsv[0] * 255), int(hsv[1] * 255), int(hsv[2] * 255)
@staticmethod
def highlight(strip, i, hue = 0.5):
i = i % len(strip)
# set the color of this pixel
strip[i] = ColorBeams.getHue(hue)
for x in range(15):
index = (i - x) % len(strip)
decay = pow(0.7, x)
# strip[index] = (int(strip[index][0] * decay), int(strip[index][1] * decay), int(strip[index][2] * decay))
strip[index] = (int(strip[i][0] * decay), int(strip[i][1] * decay), int(strip[i][2] * decay))
@staticmethod
def __get_time():
return time.time() * 1000
def __init__(self):
pass
@classmethod
def get_id(self):
return 11
@classmethod
def update(self, strip, state):
# use the time to determine the offset
t = ColorBeams.__get_time()
offset = int(((t % state.delay) / state.delay) * len(strip))
for y in range(0, len(strip), 50):
ColorBeams.highlight(strip, offset + y, (5 * y / len(strip)) % 1)
| getHue | identifier_name |
colorbeams.py | """
Color beams pattern
"""
from .pattern import Pattern
import colorsys
import time
class ColorBeams(Pattern):
@staticmethod
def getHue(hue):
hsv = colorsys.hsv_to_rgb(hue, 1, 1)
return int(hsv[0] * 255), int(hsv[1] * 255), int(hsv[2] * 255)
@staticmethod
def highlight(strip, i, hue = 0.5):
i = i % len(strip)
# set the color of this pixel
strip[i] = ColorBeams.getHue(hue)
for x in range(15):
index = (i - x) % len(strip)
decay = pow(0.7, x)
# strip[index] = (int(strip[index][0] * decay), int(strip[index][1] * decay), int(strip[index][2] * decay))
strip[index] = (int(strip[i][0] * decay), int(strip[i][1] * decay), int(strip[i][2] * decay))
@staticmethod
def __get_time():
return time.time() * 1000
def __init__(self):
pass
@classmethod
def get_id(self):
|
@classmethod
def update(self, strip, state):
# use the time to determine the offset
t = ColorBeams.__get_time()
offset = int(((t % state.delay) / state.delay) * len(strip))
for y in range(0, len(strip), 50):
ColorBeams.highlight(strip, offset + y, (5 * y / len(strip)) % 1)
| return 11 | identifier_body |
colorbeams.py | """
Color beams pattern
"""
from .pattern import Pattern
import colorsys
import time
class ColorBeams(Pattern):
@staticmethod
def getHue(hue):
hsv = colorsys.hsv_to_rgb(hue, 1, 1)
return int(hsv[0] * 255), int(hsv[1] * 255), int(hsv[2] * 255)
@staticmethod
def highlight(strip, i, hue = 0.5):
i = i % len(strip)
# set the color of this pixel
strip[i] = ColorBeams.getHue(hue)
for x in range(15):
|
@staticmethod
def __get_time():
return time.time() * 1000
def __init__(self):
pass
@classmethod
def get_id(self):
return 11
@classmethod
def update(self, strip, state):
# use the time to determine the offset
t = ColorBeams.__get_time()
offset = int(((t % state.delay) / state.delay) * len(strip))
for y in range(0, len(strip), 50):
ColorBeams.highlight(strip, offset + y, (5 * y / len(strip)) % 1)
| index = (i - x) % len(strip)
decay = pow(0.7, x)
# strip[index] = (int(strip[index][0] * decay), int(strip[index][1] * decay), int(strip[index][2] * decay))
strip[index] = (int(strip[i][0] * decay), int(strip[i][1] * decay), int(strip[i][2] * decay)) | conditional_block |
facebookConnect.js | /*
* Module : FacebookConnect.js
*
 * Sets up the basic code to connect to the Facebook JS API.
*
* Requires Config:
* - app.config.facebook.appId
*/
(function(app)
{
var module = app.module("facebookConnect", {
requires : [
"jquery-1.9.1.min"
],
init : function()
{
this.is_loaded = false;
//loading facebook all.js after we've added the fb-root div to avoid fb warning
$('body').prepend('<div id="fb-root"></div>');
app.getScript("//connect.facebook.net/en_US/all.js");
},
scrollTop : function()
{
if( this.is_loaded )
FB.Canvas.scrollTo(0, 0);
}
});
//add listener to window object
window.fbAsyncInit = function ()
{
module.is_loaded = true;
// init the FB JS SDK
FB.init({
appId : app.config.facebook.appId,
status : true, // check the login status upon init?
cookie : true, // set sessions cookies to allow your server to access the session?
xfbml : false // parse XFBML tags on this page?
});
// Grow the canvas to the correct size
FB.Canvas.scrollTo(0, 0);
FB.Canvas.setSize({ height: $('body').height()-100});
setTimeout("FB.Canvas.setAutoGrow()", 500);
//dispact event |
//fix scroll bars
if (self !== top)
{
$("body").css("overflow", "hidden");
}
};
}(app)); | $(document).trigger('facebookConnected'); | random_line_split |
facebookConnect.js | /*
* Module : FacebookConnect.js
*
 * Sets up the basic code to connect to the Facebook JS API.
*
* Requires Config:
* - app.config.facebook.appId
*/
(function(app)
{
var module = app.module("facebookConnect", {
requires : [
"jquery-1.9.1.min"
],
init : function()
{
this.is_loaded = false;
//loading facebook all.js after we've added the fb-root div to avoid fb warning
$('body').prepend('<div id="fb-root"></div>');
app.getScript("//connect.facebook.net/en_US/all.js");
},
scrollTop : function()
{
if( this.is_loaded )
FB.Canvas.scrollTo(0, 0);
}
});
//add listener to window object
window.fbAsyncInit = function ()
{
module.is_loaded = true;
// init the FB JS SDK
FB.init({
appId : app.config.facebook.appId,
status : true, // check the login status upon init?
cookie : true, // set sessions cookies to allow your server to access the session?
xfbml : false // parse XFBML tags on this page?
});
// Grow the canvas to the correct size
FB.Canvas.scrollTo(0, 0);
FB.Canvas.setSize({ height: $('body').height()-100});
setTimeout("FB.Canvas.setAutoGrow()", 500);
        // dispatch event
$(document).trigger('facebookConnected');
//fix scroll bars
if (self !== top)
|
};
}(app));
| {
$("body").css("overflow", "hidden");
} | conditional_block |
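// A note on the init order above: window.fbAsyncInit has to be assigned
// before all.js finishes loading, because the SDK invokes it once ready;
// that is also why the #fb-root div is prepended first (the SDK warns if
// it is missing) and why FB.init and the canvas sizing run inside it.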
mod.rs | use lexer::dfa::*;
use lexer::re::Test;
use rust::RustWrite;
use std::io::{self, Write};
#[cfg(test)]
mod test;
/// Generates a fn `__tokenize` based on the given DFA with the following signature:
///
/// ```ignore
/// fn tokenize(text: &str) -> Option<(usize, usize)>
/// ```
///
/// This function returns `None` if there is no matching
/// token. Otherwise, it returns the pair of (NFA index, length) for
/// the next token.
pub fn compile_tokenize_fn<W: Write>(
prefix: &str,
dfa: &DFA,
out: &mut RustWrite<W>)
-> io::Result<()>
{
let mut matcher = Matcher { prefix: prefix, dfa: dfa, out: out };
try!(matcher.tokenize());
Ok(())
}
struct Matcher<'m, W: Write+'m> {
prefix: &'m str,
dfa: &'m DFA,
out: &'m mut RustWrite<W>,
}
impl<'m,W> Matcher<'m,W>
where W: Write
{
fn tokenize(&mut self) -> io::Result<()> {
rust!(self.out, "fn {}tokenize(text: &str) -> Option<(usize, usize)> {{",
self.prefix);
rust!(self.out, "let mut {}chars = text.char_indices();", self.prefix);
rust!(self.out, "let mut {}current_match: Option<(usize, usize)> = None;", self.prefix);
rust!(self.out, "let mut {}current_state: usize = 0;", self.prefix);
rust!(self.out, "loop {{");
rust!(self.out, "match {}current_state {{", self.prefix);
for (index, state) in self.dfa.states.iter().enumerate() {
rust!(self.out, "{} => {{", index);
try!(self.state(state));
rust!(self.out, "}}");
}
rust!(self.out, "_ => {{ panic!(\"invalid state {{}}\", {}current_state); }}",
self.prefix);
rust!(self.out, "}}");
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
fn state(&mut self, state: &State) -> io::Result<()> {
// this could be pulled to the top of the loop, but we want to
// encourage LLVM to convert the loop+switch pair into actual
// gotos.
rust!(self.out, "let ({}index, {}ch) = \
match {}chars.next() {{ Some(p) => p, None => return {}current_match }};",
self.prefix, self.prefix, self.prefix, self.prefix);
rust!(self.out, "match {}ch {{", self.prefix);
for &(test, target_state) in &state.test_edges {
match test {
Test::Char(ch) => {
rust!(self.out, "{:?} => {{", ch);
let index = format!("{}index + {}", self.prefix, ch.len_utf8());
try!(self.transition(target_state, &index));
rust!(self.out, "}}");
}
}
}
rust!(self.out, "_ => {{");
let index = format!("{}index + {}ch.len_utf8()", self.prefix, self.prefix);
try!(self.transition(state.other_edge, &index));
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
fn | (&mut self,
target_state: DFAStateIndex,
index: &str)
-> io::Result<()> {
match self.dfa.state(target_state).kind {
Kind::Accepts(nfa) => {
rust!(self.out, "{}current_match = Some(({}, {}));",
self.prefix, nfa.index(), index)
}
Kind::Neither => { }
Kind::Reject => {
rust!(self.out, "return {}current_match;", self.prefix);
return Ok(());
}
}
rust!(self.out, "{}current_state = {};", self.prefix, target_state.index());
rust!(self.out, "continue;");
Ok(())
}
}
| transition | identifier_name |
mod.rs | use lexer::dfa::*;
use lexer::re::Test;
use rust::RustWrite;
use std::io::{self, Write};
#[cfg(test)]
mod test;
/// Generates a fn `__tokenize` based on the given DFA with the following signature:
///
/// ```ignore
/// fn tokenize(text: &str) -> Option<(usize, usize)>
/// ```
///
/// This function returns `None` if there is no matching
/// token. Otherwise, it returns the pair of (NFA index, length) for
/// the next token.
pub fn compile_tokenize_fn<W: Write>(
prefix: &str,
dfa: &DFA,
out: &mut RustWrite<W>)
-> io::Result<()>
|
struct Matcher<'m, W: Write+'m> {
prefix: &'m str,
dfa: &'m DFA,
out: &'m mut RustWrite<W>,
}
impl<'m,W> Matcher<'m,W>
where W: Write
{
fn tokenize(&mut self) -> io::Result<()> {
rust!(self.out, "fn {}tokenize(text: &str) -> Option<(usize, usize)> {{",
self.prefix);
rust!(self.out, "let mut {}chars = text.char_indices();", self.prefix);
rust!(self.out, "let mut {}current_match: Option<(usize, usize)> = None;", self.prefix);
rust!(self.out, "let mut {}current_state: usize = 0;", self.prefix);
rust!(self.out, "loop {{");
rust!(self.out, "match {}current_state {{", self.prefix);
for (index, state) in self.dfa.states.iter().enumerate() {
rust!(self.out, "{} => {{", index);
try!(self.state(state));
rust!(self.out, "}}");
}
rust!(self.out, "_ => {{ panic!(\"invalid state {{}}\", {}current_state); }}",
self.prefix);
rust!(self.out, "}}");
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
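    // For reference, the code emitted by tokenize() above has roughly this
    // shape (illustrative only; "__" stands for the configured prefix):
    //
    //     fn __tokenize(text: &str) -> Option<(usize, usize)> {
    //         let mut __chars = text.char_indices();
    //         let mut __current_match: Option<(usize, usize)> = None;
    //         let mut __current_state: usize = 0;
    //         loop {
    //             match __current_state { /* one arm per DFA state */ }
    //         }
    //     }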
fn state(&mut self, state: &State) -> io::Result<()> {
// this could be pulled to the top of the loop, but we want to
// encourage LLVM to convert the loop+switch pair into actual
// gotos.
rust!(self.out, "let ({}index, {}ch) = \
match {}chars.next() {{ Some(p) => p, None => return {}current_match }};",
self.prefix, self.prefix, self.prefix, self.prefix);
rust!(self.out, "match {}ch {{", self.prefix);
for &(test, target_state) in &state.test_edges {
match test {
Test::Char(ch) => {
rust!(self.out, "{:?} => {{", ch);
let index = format!("{}index + {}", self.prefix, ch.len_utf8());
try!(self.transition(target_state, &index));
rust!(self.out, "}}");
}
}
}
rust!(self.out, "_ => {{");
let index = format!("{}index + {}ch.len_utf8()", self.prefix, self.prefix);
try!(self.transition(state.other_edge, &index));
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
fn transition(&mut self,
target_state: DFAStateIndex,
index: &str)
-> io::Result<()> {
match self.dfa.state(target_state).kind {
Kind::Accepts(nfa) => {
rust!(self.out, "{}current_match = Some(({}, {}));",
self.prefix, nfa.index(), index)
}
Kind::Neither => { }
Kind::Reject => {
rust!(self.out, "return {}current_match;", self.prefix);
return Ok(());
}
}
rust!(self.out, "{}current_state = {};", self.prefix, target_state.index());
rust!(self.out, "continue;");
Ok(())
}
}
| {
let mut matcher = Matcher { prefix: prefix, dfa: dfa, out: out };
try!(matcher.tokenize());
Ok(())
} | identifier_body |
mod.rs | use lexer::dfa::*;
use lexer::re::Test;
use rust::RustWrite;
use std::io::{self, Write};
#[cfg(test)]
mod test;
/// Generates a fn `__tokenize` based on the given DFA with the following signature:
///
/// ```ignore
/// fn tokenize(text: &str) -> Option<(usize, usize)>
/// ```
///
/// This function returns `None` if there is no matching
/// token. Otherwise, it returns the pair of (NFA index, length) for
/// the next token.
pub fn compile_tokenize_fn<W: Write>(
prefix: &str,
dfa: &DFA,
out: &mut RustWrite<W>)
-> io::Result<()>
{
let mut matcher = Matcher { prefix: prefix, dfa: dfa, out: out };
try!(matcher.tokenize());
Ok(())
}
struct Matcher<'m, W: Write+'m> {
prefix: &'m str,
dfa: &'m DFA,
out: &'m mut RustWrite<W>,
}
impl<'m,W> Matcher<'m,W>
where W: Write
{
fn tokenize(&mut self) -> io::Result<()> {
rust!(self.out, "fn {}tokenize(text: &str) -> Option<(usize, usize)> {{",
self.prefix);
rust!(self.out, "let mut {}chars = text.char_indices();", self.prefix);
rust!(self.out, "let mut {}current_match: Option<(usize, usize)> = None;", self.prefix);
rust!(self.out, "let mut {}current_state: usize = 0;", self.prefix);
rust!(self.out, "loop {{");
rust!(self.out, "match {}current_state {{", self.prefix);
for (index, state) in self.dfa.states.iter().enumerate() {
rust!(self.out, "{} => {{", index);
try!(self.state(state));
rust!(self.out, "}}");
}
rust!(self.out, "_ => {{ panic!(\"invalid state {{}}\", {}current_state); }}",
self.prefix);
rust!(self.out, "}}");
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
fn state(&mut self, state: &State) -> io::Result<()> {
// this could be pulled to the top of the loop, but we want to
// encourage LLVM to convert the loop+switch pair into actual
// gotos.
rust!(self.out, "let ({}index, {}ch) = \
match {}chars.next() {{ Some(p) => p, None => return {}current_match }};",
self.prefix, self.prefix, self.prefix, self.prefix);
rust!(self.out, "match {}ch {{", self.prefix);
for &(test, target_state) in &state.test_edges {
match test {
Test::Char(ch) => {
rust!(self.out, "{:?} => {{", ch);
let index = format!("{}index + {}", self.prefix, ch.len_utf8());
try!(self.transition(target_state, &index));
rust!(self.out, "}}");
}
}
}
rust!(self.out, "_ => {{");
let index = format!("{}index + {}ch.len_utf8()", self.prefix, self.prefix);
try!(self.transition(state.other_edge, &index));
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
fn transition(&mut self,
target_state: DFAStateIndex,
index: &str)
-> io::Result<()> {
match self.dfa.state(target_state).kind {
Kind::Accepts(nfa) => {
rust!(self.out, "{}current_match = Some(({}, {}));",
self.prefix, nfa.index(), index)
}
Kind::Neither => |
Kind::Reject => {
rust!(self.out, "return {}current_match;", self.prefix);
return Ok(());
}
}
rust!(self.out, "{}current_state = {};", self.prefix, target_state.index());
rust!(self.out, "continue;");
Ok(())
}
}
| { } | conditional_block |
mod.rs | use lexer::dfa::*;
use lexer::re::Test;
use rust::RustWrite;
use std::io::{self, Write};
#[cfg(test)]
mod test;
/// Generates a fn `__tokenize` based on the given DFA with the following signature:
///
/// ```ignore
/// fn tokenize(text: &str) -> Option<(usize, usize)>
/// ```
///
/// This function returns `None` if there is no matching
/// token. Otherwise, it returns the pair of (NFA index, length) for
/// the next token.
pub fn compile_tokenize_fn<W: Write>(
prefix: &str,
dfa: &DFA,
out: &mut RustWrite<W>)
-> io::Result<()>
{
let mut matcher = Matcher { prefix: prefix, dfa: dfa, out: out };
try!(matcher.tokenize());
Ok(())
}
struct Matcher<'m, W: Write+'m> {
prefix: &'m str,
dfa: &'m DFA,
out: &'m mut RustWrite<W>,
}
impl<'m,W> Matcher<'m,W>
where W: Write
{
fn tokenize(&mut self) -> io::Result<()> {
rust!(self.out, "fn {}tokenize(text: &str) -> Option<(usize, usize)> {{",
self.prefix);
rust!(self.out, "let mut {}chars = text.char_indices();", self.prefix);
rust!(self.out, "let mut {}current_match: Option<(usize, usize)> = None;", self.prefix);
rust!(self.out, "let mut {}current_state: usize = 0;", self.prefix);
rust!(self.out, "loop {{");
rust!(self.out, "match {}current_state {{", self.prefix); | for (index, state) in self.dfa.states.iter().enumerate() {
rust!(self.out, "{} => {{", index);
try!(self.state(state));
rust!(self.out, "}}");
}
rust!(self.out, "_ => {{ panic!(\"invalid state {{}}\", {}current_state); }}",
self.prefix);
rust!(self.out, "}}");
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
fn state(&mut self, state: &State) -> io::Result<()> {
// this could be pulled to the top of the loop, but we want to
// encourage LLVM to convert the loop+switch pair into actual
// gotos.
rust!(self.out, "let ({}index, {}ch) = \
match {}chars.next() {{ Some(p) => p, None => return {}current_match }};",
self.prefix, self.prefix, self.prefix, self.prefix);
rust!(self.out, "match {}ch {{", self.prefix);
for &(test, target_state) in &state.test_edges {
match test {
Test::Char(ch) => {
rust!(self.out, "{:?} => {{", ch);
let index = format!("{}index + {}", self.prefix, ch.len_utf8());
try!(self.transition(target_state, &index));
rust!(self.out, "}}");
}
}
}
rust!(self.out, "_ => {{");
let index = format!("{}index + {}ch.len_utf8()", self.prefix, self.prefix);
try!(self.transition(state.other_edge, &index));
rust!(self.out, "}}");
rust!(self.out, "}}");
Ok(())
}
fn transition(&mut self,
target_state: DFAStateIndex,
index: &str)
-> io::Result<()> {
match self.dfa.state(target_state).kind {
Kind::Accepts(nfa) => {
rust!(self.out, "{}current_match = Some(({}, {}));",
self.prefix, nfa.index(), index)
}
Kind::Neither => { }
Kind::Reject => {
rust!(self.out, "return {}current_match;", self.prefix);
return Ok(());
}
}
rust!(self.out, "{}current_state = {};", self.prefix, target_state.index());
rust!(self.out, "continue;");
Ok(())
}
} | random_line_split |
|
test.py | # -*- coding: utf-8 -*-
import base64
import inspect
import json
import logging
import requests
import types
from django.conf import settings
from django.core.management import call_command
from django_nose import FastFixtureTestCase
from functools import wraps
from mock import patch
from tastypie.test import ResourceTestCase, TestApiClient
from rpc_proxy.proxies import get_setting
INITIAL_DATA = ('initial_data',)
TEST_DATA = ('test_data',)
logger = logging.getLogger(__name__)
def mock_request(obj, method, url, **kwargs):
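# Dispatch the intercepted HTTP call to Django's test client, keyed on the verb.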
client = TestApiClient()
authentication = 'Basic %s' % base64.b64encode(':'.join([
get_setting('SUPERUSER_USERNAME', None),
get_setting('SUPERUSER_PASSWORD', None),
]))
if method == 'GET':
data = kwargs.get('params', {})
djresponse = client.get(url, data=data, authentication=authentication)
elif method == 'POST':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.post(url, data=data, authentication=authentication)
elif method == 'PUT':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.put(url, data=data, authentication=authentication)
elif method == 'PATCH':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.patch(url, data=data, authentication=authentication)
elif method == 'DELETE':
data = kwargs.get('params', {})
djresponse = client.delete(url, data=data, authentication=authentication)
# convert django.http.HttpResponse to requests.models.Response
response = requests.models.Response()
response.status_code = djresponse.status_code
response.headers = {}
try:
response.headers['content-type'] = djresponse['content-type']
response.headers['location'] = djresponse['location']
except:
pass
response.encoding = requests.utils.get_encoding_from_headers(response.headers)
response._content = djresponse.content
return response
def mock_cache_set(key, value, timeout=None):
# do nothing
pass
def mock_api(func, **decorator_kwargs):
@patch('requests.sessions.Session.request', mock_request)
@patch('tastypie.cache.SimpleCache.set', mock_cache_set)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
class TestCase(FastFixtureTestCase):
"""
Don't be smart in test cases!
"""
fixtures = INITIAL_DATA
def __new__(cls, name):
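# Wrap every test_* method with mock_api so its requests never leave the process.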
testcase = super(TestCase, cls).__new__(cls)
if get_setting('API_URL', None):
try:
func_type = types.UnboundMethodType
except:
func_type = types.FunctionType
for name, func in inspect.getmembers(testcase):
if isinstance(func, func_type) and name.startswith('test_'):
setattr(testcase, name, mock_api(func))
return testcase
def setUp(self):
call_command('loaddata', *TEST_DATA)
super(TestCase, self).setUp()
class Proxy(TestCase):
| """
Don't be smart in test cases!
CAVEAT: Proxy classes have to be imported within each test method
to mock the requests
"""
pass | identifier_body |
|
test.py | # -*- coding: utf-8 -*-
import base64
import inspect
import json
import logging
import requests
import types
from django.conf import settings
from django.core.management import call_command
from django_nose import FastFixtureTestCase
from functools import wraps
from mock import patch
from tastypie.test import ResourceTestCase, TestApiClient
from rpc_proxy.proxies import get_setting
INITIAL_DATA = ('initial_data',)
TEST_DATA = ('test_data',)
logger = logging.getLogger(__name__)
def mock_request(obj, method, url, **kwargs):
client = TestApiClient()
authentication = 'Basic %s' % base64.b64encode(':'.join([
get_setting('SUPERUSER_USERNAME', None),
get_setting('SUPERUSER_PASSWORD', None),
]))
if method == 'GET':
data = kwargs.get('params', {})
djresponse = client.get(url, data=data, authentication=authentication)
elif method == 'POST':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.post(url, data=data, authentication=authentication)
elif method == 'PUT':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.put(url, data=data, authentication=authentication)
elif method == 'PATCH':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.patch(url, data=data, authentication=authentication)
elif method == 'DELETE':
data = kwargs.get('params', {})
djresponse = client.delete(url, data=data, authentication=authentication)
# convert django.http.HttpResponse to requests.models.Response
response = requests.models.Response()
response.status_code = djresponse.status_code
response.headers = {}
try:
response.headers['content-type'] = djresponse['content-type']
response.headers['location'] = djresponse['location']
except:
pass
response.encoding = requests.utils.get_encoding_from_headers(response.headers)
response._content = djresponse.content
return response
def mock_cache_set(key, value, timeout=None):
# do nothing
pass
def mock_api(func, **decorator_kwargs):
@patch('requests.sessions.Session.request', mock_request)
@patch('tastypie.cache.SimpleCache.set', mock_cache_set)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
class TestCase(FastFixtureTestCase):
"""
Don't be smart in test cases!
"""
fixtures = INITIAL_DATA
def __new__(cls, name):
testcase = super(TestCase, cls).__new__(cls)
if get_setting('API_URL', None):
try:
func_type = types.UnboundMethodType
except:
func_type = types.FunctionType
for name, func in inspect.getmembers(testcase):
if isinstance(func, func_type) and name.startswith('test_'):
setattr(testcase, name, mock_api(func))
return testcase
def | (self):
call_command('loaddata', *TEST_DATA)
super(TestCase, self).setUp()
class Proxy(TestCase):
"""
Don't be smart in test cases!
CAVEAT: Proxy classes have to be imported within each test method
to mock the requests
"""
pass
| setUp | identifier_name |
test.py | # -*- coding: utf-8 -*-
import base64
import inspect
import json
import logging
import requests
import types
from django.conf import settings
from django.core.management import call_command
from django_nose import FastFixtureTestCase
from functools import wraps
from mock import patch
from tastypie.test import ResourceTestCase, TestApiClient
from rpc_proxy.proxies import get_setting
INITIAL_DATA = ('initial_data',)
TEST_DATA = ('test_data',)
logger = logging.getLogger(__name__)
def mock_request(obj, method, url, **kwargs):
client = TestApiClient()
authentication = 'Basic %s' % base64.b64encode(':'.join([
get_setting('SUPERUSER_USERNAME', None),
get_setting('SUPERUSER_PASSWORD', None),
]))
if method == 'GET':
data = kwargs.get('params', {})
djresponse = client.get(url, data=data, authentication=authentication)
elif method == 'POST':
|
elif method == 'PUT':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.put(url, data=data, authentication=authentication)
elif method == 'PATCH':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.patch(url, data=data, authentication=authentication)
elif method == 'DELETE':
data = kwargs.get('params', {})
djresponse = client.delete(url, data=data, authentication=authentication)
# convert django.http.HttpResponse to requests.models.Response
response = requests.models.Response()
response.status_code = djresponse.status_code
response.headers = {}
try:
response.headers['content-type'] = djresponse['content-type']
response.headers['location'] = djresponse['location']
except:
pass
response.encoding = requests.utils.get_encoding_from_headers(response.headers)
response._content = djresponse.content
return response
def mock_cache_set(key, value, timeout=None):
# do nothing
pass
def mock_api(func, **decorator_kwargs):
@patch('requests.sessions.Session.request', mock_request)
@patch('tastypie.cache.SimpleCache.set', mock_cache_set)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
class TestCase(FastFixtureTestCase):
"""
Don't be smart in test cases!
"""
fixtures = INITIAL_DATA
def __new__(cls, name):
testcase = super(TestCase, cls).__new__(cls)
if get_setting('API_URL', None):
try:
func_type = types.UnboundMethodType
except:
func_type = types.FunctionType
for name, func in inspect.getmembers(testcase):
if isinstance(func, func_type) and name.startswith('test_'):
setattr(testcase, name, mock_api(func))
return testcase
def setUp(self):
call_command('loaddata', *TEST_DATA)
super(TestCase, self).setUp()
class Proxy(TestCase):
"""
Don't be smart in test cases!
CAVEAT: Proxy classes have to be imported within each test method
to mock the requests
"""
pass
| data = json.loads(kwargs.get('data', '{}'))
djresponse = client.post(url, data=data, authentication=authentication) | conditional_block |
test.py | # -*- coding: utf-8 -*-
import base64
import inspect
import json
import logging
import requests
import types
from django.conf import settings
from django.core.management import call_command
from django_nose import FastFixtureTestCase
from functools import wraps
from mock import patch
from tastypie.test import ResourceTestCase, TestApiClient
from rpc_proxy.proxies import get_setting
INITIAL_DATA = ('initial_data',)
TEST_DATA = ('test_data',)
logger = logging.getLogger(__name__)
def mock_request(obj, method, url, **kwargs):
client = TestApiClient()
authentication = 'Basic %s' % base64.b64encode(':'.join([
get_setting('SUPERUSER_USERNAME', None),
get_setting('SUPERUSER_PASSWORD', None),
]))
if method == 'GET':
data = kwargs.get('params', {})
djresponse = client.get(url, data=data, authentication=authentication)
elif method == 'POST':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.post(url, data=data, authentication=authentication)
elif method == 'PUT':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.put(url, data=data, authentication=authentication)
elif method == 'PATCH':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.patch(url, data=data, authentication=authentication)
elif method == 'DELETE':
data = kwargs.get('params', {})
djresponse = client.delete(url, data=data, authentication=authentication)
# convert django.http.HttpResponse to requests.models.Response
response = requests.models.Response()
response.status_code = djresponse.status_code
response.headers = {}
try:
response.headers['content-type'] = djresponse['content-type']
response.headers['location'] = djresponse['location'] | return response
def mock_cache_set(key, value, timeout=None):
# do nothing
pass
def mock_api(func, **decorator_kwargs):
@patch('requests.sessions.Session.request', mock_request)
@patch('tastypie.cache.SimpleCache.set', mock_cache_set)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
class TestCase(FastFixtureTestCase):
"""
Don't be smart in test cases!
"""
fixtures = INITIAL_DATA
def __new__(cls, name):
testcase = super(TestCase, cls).__new__(cls)
if get_setting('API_URL', None):
try:
func_type = types.UnboundMethodType
except:
func_type = types.FunctionType
for name, func in inspect.getmembers(testcase):
if isinstance(func, func_type) and name.startswith('test_'):
setattr(testcase, name, mock_api(func))
return testcase
def setUp(self):
call_command('loaddata', *TEST_DATA)
super(TestCase, self).setUp()
class Proxy(TestCase):
"""
Don't be smart in test cases!
CAVEAT: Proxy classes have to be imported within each test method
to mock the requests
"""
pass | except:
pass
response.encoding = requests.utils.get_encoding_from_headers(response.headers)
response._content = djresponse.content
| random_line_split |
immunicity.py | import re
import fnmatch
import urllib2
from kodipopcorntime import plugin
from kodipopcorntime.caching import shelf
PAC_URL = "http://clientconfig.immunicity.org/pacs/all.pac"
CACHE = 1 * 3600 # 1 hour caching
_config = {}
def config():
global _config
if not _config:
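# First use: fetch the PAC file and extract the proxy server and domain globs.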
with shelf("kodipopcorntime.immunicity.pac_config", ttl=CACHE) as pac_config:
plugin.log.info("Fetching Immunicity PAC file")
pac_data = urllib2.urlopen(PAC_URL).read()
pac_config["server"] = re.search(r"var proxyserver = '(.*)'", pac_data).group(1)
pac_config["domains"] = map(lambda x: x.replace(r"\Z(?ms)", ""), map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
_config = pac_config
return _config
def get_proxy_for(url):
| conf = config()
for domain in conf["domains"]:
if re.search(domain, url):
return conf["server"] | identifier_body |
|
immunicity.py | import re
import fnmatch
import urllib2
from kodipopcorntime import plugin
from kodipopcorntime.caching import shelf
PAC_URL = "http://clientconfig.immunicity.org/pacs/all.pac"
CACHE = 1 * 3600 # 1 hour caching
_config = {}
def config():
global _config
if not _config:
|
return _config
def get_proxy_for(url):
conf = config()
for domain in conf["domains"]:
if re.search(domain, url):
return conf["server"]
| with shelf("kodipopcorntime.immunicity.pac_config", ttl=CACHE) as pac_config:
plugin.log.info("Fetching Immunicity PAC file")
pac_data = urllib2.urlopen(PAC_URL).read()
pac_config["server"] = re.search(r"var proxyserver = '(.*)'", pac_data).group(1)
pac_config["domains"] = map(lambda x: x.replace(r"\Z(?ms)", ""), map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
_config = pac_config | conditional_block |
immunicity.py | import re
import fnmatch
import urllib2
from kodipopcorntime import plugin
from kodipopcorntime.caching import shelf
PAC_URL = "http://clientconfig.immunicity.org/pacs/all.pac"
CACHE = 1 * 3600 # 1 hour caching
_config = {}
def config():
global _config
if not _config:
with shelf("kodipopcorntime.immunicity.pac_config", ttl=CACHE) as pac_config:
plugin.log.info("Fetching Immunicity PAC file")
pac_data = urllib2.urlopen(PAC_URL).read()
pac_config["server"] = re.search(r"var proxyserver = '(.*)'", pac_data).group(1)
pac_config["domains"] = map(lambda x: x.replace(r"\Z(?ms)", ""), map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
_config = pac_config
return _config
def | (url):
conf = config()
for domain in conf["domains"]:
if re.search(domain, url):
return conf["server"]
| get_proxy_for | identifier_name |
immunicity.py | import re
import fnmatch
import urllib2
from kodipopcorntime import plugin
from kodipopcorntime.caching import shelf
PAC_URL = "http://clientconfig.immunicity.org/pacs/all.pac"
CACHE = 1 * 3600 # 1 hour caching
_config = {}
def config():
global _config
if not _config:
with shelf("kodipopcorntime.immunicity.pac_config", ttl=CACHE) as pac_config:
plugin.log.info("Fetching Immunicity PAC file")
pac_data = urllib2.urlopen(PAC_URL).read()
pac_config["server"] = re.search(r"var proxyserver = '(.*)'", pac_data).group(1)
pac_config["domains"] = map(lambda x: x.replace(r"\Z(?ms)", ""), map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
_config = pac_config
return _config
def get_proxy_for(url): | for domain in conf["domains"]:
if re.search(domain, url):
return conf["server"] | conf = config() | random_line_split |
recipe-440498.py | # Relative-refs.pyw
"""A short python script for repathing xrefs in Autocad."""
import win32com.client,os, os.path, tkFileDialog
from Tkinter import *
from tkMessageBox import askokcancel
from time import sleep
# Get a COM object for Autocad
acad = win32com.client.Dispatch("AutoCAD.Application")
def repath(filename):
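# Rewrite every xref in the drawing to a relative ..\x-ref\<name> path, then save.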
print 'Repathing %s...' %filename
doc = acad.Documents.Open(filename)
blocks = doc.Database.Blocks # Internally xrefs are just blocks!
xrefs = [item for item in blocks if item.IsXRef]
if xrefs:
for xref in xrefs:
old_path = xref.Path
new_path = os.path.join('..\\x-ref\\',os.path.basename(old_path))
xref.Path = new_path
print 'Old path name was %s, new path name is %s.\n' %(old_path, new_path)
try:
doc.Close(True) # Close and save
except: # Something when wrong,
doc.Close(False) # close then report it
raise
class Logger:
"""A filelike object that prints its input on the screen."""
def __init__(self, logfile=None):
"""Takes one argument, a file like object for logging."""
print 'Starting logger...'
if not logfile:
self.logfile = open('relative-refs.log','w')
else:
self.logfile = logfile
sys.stderr = self # Super cheap logging facility...
sys.stdout = self # Just redirect output to a file.
print 'Logger running...'
def write(self, line):
sys.__stdout__.write(line)
self.logfile.write(line)
def close(self):
"""The close method restores stdout and stderr to normal."""
self.logfile.close()
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
class Tktextfile:
"""A file like interface to the Tk text widget."""
def __init__(self, root):
"""Create a scrollable text widget to be written to."""
self.root = root
self.text = Text(root,width=40,height=20)
self.text.pack(side=LEFT, expand=True, fill=BOTH)
scrollbar = Scrollbar(root)
scrollbar.pack(side=RIGHT,fill=Y)
self.text.configure(yscrollcommand=scrollbar.set)
scrollbar.config(command=self.text.yview)
self.text.focus()
def write(self, line):
"""Write method for file like widget."""
self.text.insert(INSERT, line)
self.text.see(END)
def close(self):
"""Fake close method."""
pass
if __name__ == '__main__':
if acad.Visible:
acad.Visible = False
root = Tk()
text = Tktextfile(root)
logger = Logger(text)
dir = tkFileDialog.askdirectory()
answer = askokcancel('RePath','Re path all dwg files in ' + dir + '?')
if answer:
for dirpath, subdirs, files in os.walk(dir):
for name in files:
ext = name.split('.')[-1] or ''
# We want dwg files which are not in the x-ref directory
if ext.lower() == 'dwg' and 'x-ref' not in dirpath.lower():
|
root.update()
acad.Visible = True
| drawing = os.path.join(dirpath, name)
try:
repath(drawing)
except:
print 'Unable to repath drawing %s!' %drawing | conditional_block |
recipe-440498.py | # Relative-refs.pyw
"""A short python script for repathing xrefs in Autocad."""
import win32com.client,os, os.path, tkFileDialog
from Tkinter import *
from tkMessageBox import askokcancel
from time import sleep
# Get a COM object for Autocad
acad = win32com.client.Dispatch("AutoCAD.Application")
def repath(filename):
print 'Repathing %s...' %filename
doc = acad.Documents.Open(filename)
blocks = doc.Database.Blocks # Internally xrefs are just blocks!
xrefs = [item for item in blocks if item.IsXRef]
if xrefs:
for xref in xrefs:
old_path = xref.Path
new_path = os.path.join('..\\x-ref\\',os.path.basename(old_path))
xref.Path = new_path
print 'Old path name was %s, new path name is %s.\n' %(old_path, new_path)
try:
doc.Close(True) # Close and save
except: # Something when wrong,
doc.Close(False) # close then report it
raise
class Logger:
"""A filelike object that prints its input on the screen."""
def __init__(self, logfile=None):
"""Takes one argument, a file like object for logging."""
print 'Starting logger...'
if not logfile:
self.logfile = open('relative-refs.log','w')
else:
self.logfile = logfile
sys.stderr = self # Super cheap logging facility...
sys.stdout = self # Just redirect output to a file.
print 'Logger running...'
def write(self, line):
sys.__stdout__.write(line)
self.logfile.write(line)
def close(self):
"""The close method restores stdout and stderr to normal."""
self.logfile.close()
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
class Tktextfile:
"""A file like interface to the Tk text widget."""
def __init__(self, root):
"""Create a scrollable text widget to be written to."""
self.root = root
self.text = Text(root,width=40,height=20)
self.text.pack(side=LEFT, expand=True, fill=BOTH)
scrollbar = Scrollbar(root)
scrollbar.pack(side=RIGHT,fill=Y)
self.text.configure(yscrollcommand=scrollbar.set)
scrollbar.config(command=self.text.yview)
self.text.focus()
def write(self, line):
|
def close(self):
"""Fake close method."""
pass
if __name__ == '__main__':
if acad.Visible:
acad.Visible = False
root = Tk()
text = Tktextfile(root)
logger = Logger(text)
dir = tkFileDialog.askdirectory()
answer = askokcancel('RePath','Re path all dwg files in ' + dir + '?')
if answer:
for dirpath, subdirs, files in os.walk(dir):
for name in files:
ext = name.split('.')[-1] or ''
# We want dwg files which are not in the x-ref directory
if ext.lower() == 'dwg' and 'x-ref' not in dirpath.lower():
drawing = os.path.join(dirpath, name)
try:
repath(drawing)
except:
print 'Unable to repath drawing %s!' %drawing
root.update()
acad.Visible = True
| """Write method for file like widget."""
self.text.insert(INSERT, line)
self.text.see(END) | identifier_body |
recipe-440498.py | # Relative-refs.pyw
"""A short python script for repathing xrefs in Autocad."""
import win32com.client,os, os.path, tkFileDialog
from Tkinter import *
from tkMessageBox import askokcancel
from time import sleep
# Get a COM object for Autocad
acad = win32com.client.Dispatch("AutoCAD.Application")
def repath(filename):
print 'Repathing %s...' %filename
doc = acad.Documents.Open(filename)
blocks = doc.Database.Blocks # Internally xrefs are just blocks!
xrefs = [item for item in blocks if item.IsXRef]
if xrefs:
for xref in xrefs:
old_path = xref.Path
new_path = os.path.join('..\\x-ref\\',os.path.basename(old_path))
xref.Path = new_path
print 'Old path name was %s, new path name is %s.\n' %(old_path, new_path)
try:
doc.Close(True) # Close and save
except: # Something when wrong,
doc.Close(False) # close then report it
raise
class | :
"""A filelike object that prints its input on the screen."""
def __init__(self, logfile=None):
"""Takes one argument, a file like object for logging."""
print 'Starting logger...'
if not logfile:
self.logfile = open('relative-refs.log','w')
else:
self.logfile = logfile
sys.stderr = self # Super cheap logging facility...
sys.stdout = self # Just redirect output to a file.
print 'Logger running...'
def write(self, line):
sys.__stdout__.write(line)
self.logfile.write(line)
def close(self):
"""The close method restores stdout and stderr to normal."""
self.logfile.close()
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
class Tktextfile:
"""A file like interface to the Tk text widget."""
def __init__(self, root):
"""Create a scrollable text widget to be written to."""
self.root = root
self.text = Text(root,width=40,height=20)
self.text.pack(side=LEFT, expand=True, fill=BOTH)
scrollbar = Scrollbar(root)
scrollbar.pack(side=RIGHT,fill=Y)
self.text.configure(yscrollcommand=scrollbar.set)
scrollbar.config(command=self.text.yview)
self.text.focus()
def write(self, line):
"""Write method for file like widget."""
self.text.insert(INSERT, line)
self.text.see(END)
def close(self):
"""Fake close method."""
pass
if __name__ == '__main__':
if acad.Visible:
acad.Visible = False
root = Tk()
text = Tktextfile(root)
logger = Logger(text)
dir = tkFileDialog.askdirectory()
answer = askokcancel('RePath','Re path all dwg files in ' + dir + '?')
if answer:
for dirpath, subdirs, files in os.walk(dir):
for name in files:
ext = name.split('.')[-1] or ''
# We want dwg files which are not in the x-ref directory
if ext.lower() == 'dwg' and 'x-ref' not in dirpath.lower():
drawing = os.path.join(dirpath, name)
try:
repath(drawing)
except:
print 'Unable to repath drawing %s!' %drawing
root.update()
acad.Visible = True
| Logger | identifier_name |
recipe-440498.py | # Relative-refs.pyw
"""A short python script for repathing xrefs in Autocad."""
import win32com.client,os, os.path, tkFileDialog
from Tkinter import *
from tkMessageBox import askokcancel
from time import sleep
# Get a COM object for Autocad
acad = win32com.client.Dispatch("AutoCAD.Application")
def repath(filename):
print 'Repathing %s...' %filename
doc = acad.Documents.Open(filename)
blocks = doc.Database.Blocks # Internally xrefs are just blocks!
xrefs = [item for item in blocks if item.IsXRef]
if xrefs:
for xref in xrefs:
old_path = xref.Path
new_path = os.path.join('..\\x-ref\\',os.path.basename(old_path))
xref.Path = new_path
print 'Old path name was %s, new path name is %s.\n' %(old_path, new_path)
try:
doc.Close(True) # Close and save
except: # Something when wrong,
doc.Close(False) # close then report it
raise
class Logger:
"""A filelike object that prints its input on the screen."""
def __init__(self, logfile=None):
"""Takes one argument, a file like object for logging."""
print 'Starting logger...'
if not logfile:
self.logfile = open('relative-refs.log','w')
else:
self.logfile = logfile
sys.stderr = self # Super cheap logging facility...
sys.stdout = self # Just redirect output to a file.
print 'Logger running...'
def write(self, line):
sys.__stdout__.write(line)
self.logfile.write(line)
def close(self):
"""The close method restores stdout and stderr to normal."""
self.logfile.close()
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
class Tktextfile:
"""A file like interface to the Tk text widget."""
def __init__(self, root):
"""Create a scrollable text widget to be written to."""
self.root = root
self.text = Text(root,width=40,height=20)
self.text.pack(side=LEFT, expand=True, fill=BOTH)
scrollbar = Scrollbar(root)
scrollbar.pack(side=RIGHT,fill=Y)
self.text.configure(yscrollcommand=scrollbar.set)
scrollbar.config(command=self.text.yview)
self.text.focus()
def write(self, line):
"""Write method for file like widget."""
self.text.insert(INSERT, line)
self.text.see(END)
def close(self):
"""Fake close method."""
pass
if __name__ == '__main__':
if acad.Visible:
acad.Visible = False
root = Tk()
text = Tktextfile(root)
logger = Logger(text)
dir = tkFileDialog.askdirectory()
answer = askokcancel('RePath','Re path all dwg files in ' + dir + '?')
if answer:
for dirpath, subdirs, files in os.walk(dir):
for name in files:
ext = name.split('.')[-1] or ''
# We want dwg files which are not in the x-ref directory
if ext.lower() == 'dwg' and 'x-ref' not in dirpath.lower(): | drawing = os.path.join(dirpath, name)
try:
repath(drawing)
except:
print 'Unable to repath drawing %s!' %drawing
root.update()
acad.Visible = True | random_line_split |
|
services.py | import logging
import requests
import xml.etree.ElementTree as ET
from django.utils.translation import gettext_lazy as _
from churchill.apps.currencies.models import CurrencyValue, Currency, CurrencyValueType
logger = logging.getLogger()
def get_default_currency_id() -> int:
currency, _ = Currency.objects.get_or_create(
name="United States Dollar", iso3="USD"
)
return currency.id
def get_currency_options() -> dict:
return {c.iso3: c.name for c in Currency.objects.all()}
def create_currency_pair(currency, node):
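# Each <currency> node carries both purchase and sale rates; store one row per side.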
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.BUY,
value=float(node.find("purchase").text),
)
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.SELL,
value=float(node.find("sale").text),
)
def update_currencies():
byn_currency = Currency.objects.filter(iso3="BYN").first()
eur_currency = Currency.objects.filter(iso3="EUR").first()
rub_currency = Currency.objects.filter(iso3="RUB").first()
if not any((byn_currency, eur_currency, rub_currency)):
logger.info(_("No currencies are setup"))
response = requests.get("https://www.mtbank.by/currxml.php?ver=2")
if response.status_code == 200:
for child in ET.fromstring(response.content): | code_to = node.find("codeTo").text
if (
byn_currency
and ((code == "USD" and code_to == "BYN")
or (code == "BYN" and code_to == "USD"))
):
create_currency_pair(byn_currency, node)
if (
eur_currency
and ((code == "USD" and code_to == "EUR")
or (code == "EUR" and code_to == "USD"))
):
create_currency_pair(eur_currency, node)
if (
rub_currency
and ((code == "USD" and code_to == "RUB")
or (code == "RUB" and code_to == "USD"))
):
create_currency_pair(rub_currency, node) | if child.attrib.get("id") == "168,768,968,868":
for node in child.findall("currency"):
code = node.find("code").text | random_line_split |
services.py | import logging
import requests
import xml.etree.ElementTree as ET
from django.utils.translation import gettext_lazy as _
from churchill.apps.currencies.models import CurrencyValue, Currency, CurrencyValueType
logger = logging.getLogger()
def | () -> int:
currency, _ = Currency.objects.get_or_create(
name="United States Dollar", iso3="USD"
)
return currency.id
def get_currency_options() -> dict:
return {c.iso3: c.name for c in Currency.objects.all()}
def create_currency_pair(currency, node):
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.BUY,
value=float(node.find("purchase").text),
)
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.SELL,
value=float(node.find("sale").text),
)
def update_currencies():
byn_currency = Currency.objects.filter(iso3="BYN").first()
eur_currency = Currency.objects.filter(iso3="EUR").first()
rub_currency = Currency.objects.filter(iso3="RUB").first()
if not any((byn_currency, eur_currency, rub_currency)):
logger.info(_("No currencies are setup"))
response = requests.get("https://www.mtbank.by/currxml.php?ver=2")
if response.status_code == 200:
for child in ET.fromstring(response.content):
if child.attrib.get("id") == "168,768,968,868":
for node in child.findall("currency"):
code = node.find("code").text
code_to = node.find("codeTo").text
if (
byn_currency
and ((code == "USD" and code_to == "BYN")
or (code == "BYN" and code_to == "USD"))
):
create_currency_pair(byn_currency, node)
if (
eur_currency
and ((code == "USD" and code_to == "EUR")
or (code == "EUR" and code_to == "USD"))
):
create_currency_pair(eur_currency, node)
if (
rub_currency
and ((code == "USD" and code_to == "RUB")
or (code == "RUB" and code_to == "USD"))
):
create_currency_pair(rub_currency, node)
| get_default_currency_id | identifier_name |
services.py | import logging
import requests
import xml.etree.ElementTree as ET
from django.utils.translation import gettext_lazy as _
from churchill.apps.currencies.models import CurrencyValue, Currency, CurrencyValueType
logger = logging.getLogger()
def get_default_currency_id() -> int:
currency, _ = Currency.objects.get_or_create(
name="United States Dollar", iso3="USD"
)
return currency.id
def get_currency_options() -> dict:
return {c.iso3: c.name for c in Currency.objects.all()}
def create_currency_pair(currency, node):
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.BUY,
value=float(node.find("purchase").text),
)
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.SELL,
value=float(node.find("sale").text),
)
def update_currencies():
| byn_currency = Currency.objects.filter(iso3="BYN").first()
eur_currency = Currency.objects.filter(iso3="EUR").first()
rub_currency = Currency.objects.filter(iso3="RUB").first()
if not any((byn_currency, eur_currency, rub_currency)):
logger.info(_("No currencies are setup"))
response = requests.get("https://www.mtbank.by/currxml.php?ver=2")
if response.status_code == 200:
for child in ET.fromstring(response.content):
if child.attrib.get("id") == "168,768,968,868":
for node in child.findall("currency"):
code = node.find("code").text
code_to = node.find("codeTo").text
if (
byn_currency
and ((code == "USD" and code_to == "BYN")
or (code == "BYN" and code_to == "USD"))
):
create_currency_pair(byn_currency, node)
if (
eur_currency
and ((code == "USD" and code_to == "EUR")
or (code == "EUR" and code_to == "USD"))
):
create_currency_pair(eur_currency, node)
if (
rub_currency
and ((code == "USD" and code_to == "RUB")
or (code == "RUB" and code_to == "USD"))
):
create_currency_pair(rub_currency, node) | identifier_body |
|
services.py | import logging
import requests
import xml.etree.ElementTree as ET
from django.utils.translation import gettext_lazy as _
from churchill.apps.currencies.models import CurrencyValue, Currency, CurrencyValueType
logger = logging.getLogger()
def get_default_currency_id() -> int:
currency, _ = Currency.objects.get_or_create(
name="United States Dollar", iso3="USD"
)
return currency.id
def get_currency_options() -> dict:
return {c.iso3: c.name for c in Currency.objects.all()}
def create_currency_pair(currency, node):
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.BUY,
value=float(node.find("purchase").text),
)
CurrencyValue.objects.create(
currency=currency,
type=CurrencyValueType.SELL,
value=float(node.find("sale").text),
)
def update_currencies():
byn_currency = Currency.objects.filter(iso3="BYN").first()
eur_currency = Currency.objects.filter(iso3="EUR").first()
rub_currency = Currency.objects.filter(iso3="RUB").first()
if not any((byn_currency, eur_currency, rub_currency)):
logger.info(_("No currencies are setup"))
response = requests.get("https://www.mtbank.by/currxml.php?ver=2")
if response.status_code == 200:
for child in ET.fromstring(response.content):
| if child.attrib.get("id") == "168,768,968,868":
for node in child.findall("currency"):
code = node.find("code").text
code_to = node.find("codeTo").text
if (
byn_currency
and ((code == "USD" and code_to == "BYN")
or (code == "BYN" and code_to == "USD"))
):
create_currency_pair(byn_currency, node)
if (
eur_currency
and ((code == "USD" and code_to == "EUR")
or (code == "EUR" and code_to == "USD"))
):
create_currency_pair(eur_currency, node)
if (
rub_currency
and ((code == "USD" and code_to == "RUB")
or (code == "RUB" and code_to == "USD"))
):
create_currency_pair(rub_currency, node) | conditional_block |
|
fix_csxml_character_encoding.py | import sys
import logging
codec_options = ['utf-8', 'latin_1']
logger = logging.getLogger(__name__)
def try_decode(byte_string, codec):
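# Return the decoded text, or None if this codec cannot decode the bytes.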
try:
s = byte_string.decode(codec)
return s
except:
return None
def shortest_string(strings):
best_string = None
best_length = None
for s in strings:
if best_string is None or len(s) < best_length:
best_string = s
best_length = len(s)
return best_string
def fix_character_encoding(input_file, output_file):
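# Decode line by line; when both codecs succeed, keep the shorter decoding.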
with open(input_file, 'rb') as f_in:
with open(output_file, 'wb') as f_out:
for line in f_in:
# Try to decode with both latin_1 and utf-8
decoded = [try_decode(line, c) for c in codec_options]
decoded = [d for d in decoded if d is not None]
if len(decoded) == 0:
# Hopefully at least one codec worked
|
else:
# If more than one, choose the codec that gives the best
# length
chosen_string = shortest_string(decoded)
# Write result as ascii, with non-ascii characters escaped
f_out.write(chosen_string.encode('utf-8'))
if __name__ == '__main__':
args = sys.argv[1:]
if len(args) != 2:
logger.error('Expected two arguments: the input file'
' and the output file')
sys.exit(1)
input_file = args[0]
output_file = args[1]
fix_character_encoding(input_file, output_file)
| logger.info('Could not decode: %s' % line)
sys.exit(1) | conditional_block |
fix_csxml_character_encoding.py | import sys
import logging
codec_options = ['utf-8', 'latin_1']
logger = logging.getLogger(__name__)
def try_decode(byte_string, codec):
try:
s = byte_string.decode(codec)
return s
except:
return None
def shortest_string(strings):
|
def fix_character_encoding(input_file, output_file):
with open(input_file, 'rb') as f_in:
with open(output_file, 'wb') as f_out:
for line in f_in:
# Try to decode with both latin_1 and utf-8
decoded = [try_decode(line, c) for c in codec_options]
decoded = [d for d in decoded if d is not None]
if len(decoded) == 0:
# Hopefully at least one codec worked
logger.info('Could not decode: %s' % line)
sys.exit(1)
else:
# If more than one, choose the codec that gives the best
# length
chosen_string = shortest_string(decoded)
# Write result as ascii, with non-ascii characters escaped
f_out.write(chosen_string.encode('utf-8'))
if __name__ == '__main__':
args = sys.argv[1:]
if len(args) != 2:
logger.error('Expected two arguments: the input file'
' and the output file')
sys.exit(1)
input_file = args[0]
output_file = args[1]
fix_character_encoding(input_file, output_file)
| best_string = None
best_length = None
for s in strings:
if best_string is None or len(s) < best_length:
best_string = s
best_length = len(s)
return best_string | identifier_body |
fix_csxml_character_encoding.py | import sys
import logging
codec_options = ['utf-8', 'latin_1']
logger = logging.getLogger(__name__)
def try_decode(byte_string, codec):
try:
s = byte_string.decode(codec)
return s
except:
return None
def | (strings):
best_string = None
best_length = None
for s in strings:
if best_string is None or len(s) < best_length:
best_string = s
best_length = len(s)
return best_string
def fix_character_encoding(input_file, output_file):
with open(input_file, 'rb') as f_in:
with open(output_file, 'wb') as f_out:
for line in f_in:
# Try to decode with both latin_1 and utf-8
decoded = [try_decode(line, c) for c in codec_options]
decoded = [d for d in decoded if d is not None]
if len(decoded) == 0:
# Hopefully at least one codec worked
logger.info('Could not decode: %s' % line)
sys.exit(1)
else:
# If more than one, choose the codec that gives the best
# length
chosen_string = shortest_string(decoded)
# Write result as ascii, with non-ascii characters escaped
f_out.write(chosen_string.encode('utf-8'))
if __name__ == '__main__':
args = sys.argv[1:]
if len(args) != 2:
logger.error('Expected two arguments: the input file'
' and the output file')
sys.exit(1)
input_file = args[0]
output_file = args[1]
fix_character_encoding(input_file, output_file)
| shortest_string | identifier_name |
fix_csxml_character_encoding.py | import sys
import logging
codec_options = ['utf-8', 'latin_1']
logger = logging.getLogger(__name__)
def try_decode(byte_string, codec):
try:
s = byte_string.decode(codec)
return s
except:
return None
def shortest_string(strings):
best_string = None
best_length = None
for s in strings:
if best_string is None or len(s) < best_length:
best_string = s
best_length = len(s)
return best_string
def fix_character_encoding(input_file, output_file):
with open(input_file, 'rb') as f_in:
with open(output_file, 'wb') as f_out:
for line in f_in:
# Try to decode with both latin_1 and utf-8
decoded = [try_decode(line, c) for c in codec_options]
decoded = [d for d in decoded if d is not None]
if len(decoded) == 0:
# Hopefully at least one codec worked
logger.info('Could not decode: %s' % line)
sys.exit(1)
else:
# If more than one, choose the codec that gives the best
# length
chosen_string = shortest_string(decoded)
| args = sys.argv[1:]
if len(args) != 2:
logger.error('Expected two arguments: the input file'
' and the output file')
sys.exit(1)
input_file = args[0]
output_file = args[1]
fix_character_encoding(input_file, output_file) | # Write result as ascii, with non-ascii characters escaped
f_out.write(chosen_string.encode('utf-8'))
if __name__ == '__main__': | random_line_split |
index.tsx | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* | * under the License.
*/
import { DatePicker as AntdDatePicker } from 'antd';
import { styled } from '@superset-ui/core';
const AntdRangePicker = AntdDatePicker.RangePicker;
export const RangePicker = styled(AntdRangePicker)`
border-radius: ${({ theme }) => theme.gridUnit}px;
`;
export const DatePicker = AntdDatePicker; | * Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations | random_line_split |
version.py | #!/usr/bin/python3
"""Script to determine the Pywikibot version (tag, revision and date).
.. versionchanged:: 7.0
version script was moved to the framework scripts folder
"""
#
# (C) Pywikibot team, 2007-2021
#
# Distributed under the terms of the MIT license.
#
import codecs
import os
import sys
import pywikibot
from pywikibot.version import get_toolforge_hostname, getversion
class DummyModule:
"""Fake module instance."""
__version__ = 'n/a'
try:
import setuptools
except ImportError:
setuptools = DummyModule()
try:
import mwparserfromhell
except ImportError:
mwparserfromhell = DummyModule()
try:
import wikitextparser
except ImportError:
wikitextparser = DummyModule()
try:
import requests
except ImportError:
requests = DummyModule()
WMF_CACERT = 'MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs'
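# Fragment of the Wikimedia-required CA certificate; main() checks that the
# requests cacert bundle contains it.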
def main(*args: str) -> None:
"""Print pywikibot version and important settings."""
pywikibot.output('Pywikibot: ' + getversion())
pywikibot.output('Release version: ' + pywikibot.__version__)
pywikibot.output('setuptools version: ' + setuptools.__version__)
pywikibot.output('mwparserfromhell version: '
+ mwparserfromhell.__version__)
pywikibot.output('wikitextparser version: ' + wikitextparser.__version__)
pywikibot.output('requests version: ' + requests.__version__)
has_wikimedia_cert = False
if (not hasattr(requests, 'certs')
or not hasattr(requests.certs, 'where')
or not callable(requests.certs.where)):
pywikibot.output(' cacerts: not defined')
elif not os.path.isfile(requests.certs.where()):
pywikibot.output(' cacerts: {} (missing)'.format(
requests.certs.where()))
else:
pywikibot.output(' cacerts: ' + requests.certs.where())
with codecs.open(requests.certs.where(), 'r', 'utf-8') as cert_file:
text = cert_file.read()
if WMF_CACERT in text:
has_wikimedia_cert = True
pywikibot.output(' certificate test: {}'
.format('ok' if has_wikimedia_cert else 'not ok'))
if not has_wikimedia_cert:
pywikibot.output(' Please reinstall requests!')
pywikibot.output('Python: ' + sys.version)
toolforge_env_hostname = get_toolforge_hostname()
if toolforge_env_hostname:
pywikibot.output('Toolforge hostname: ' + toolforge_env_hostname)
# check environment settings
settings = {key for key in os.environ if key.startswith('PYWIKIBOT')}
settings.update(['PYWIKIBOT_DIR', 'PYWIKIBOT_DIR_PWB',
'PYWIKIBOT_NO_USER_CONFIG'])
for environ_name in sorted(settings):
pywikibot.output(
'{}: {}'.format(environ_name,
os.environ.get(environ_name, 'Not set') or "''"))
pywikibot.output('Config base dir: ' + pywikibot.config.base_dir)
for family, usernames in pywikibot.config.usernames.items():
if not usernames:
|
pywikibot.output('Usernames for family {!r}:'.format(family))
for lang, username in usernames.items():
pywikibot.output('\t{}: {}'.format(lang, username))
if __name__ == '__main__':
main()
| continue | conditional_block |
version.py | #!/usr/bin/python3
"""Script to determine the Pywikibot version (tag, revision and date).
.. versionchanged:: 7.0
version script was moved to the framework scripts folder
"""
#
# (C) Pywikibot team, 2007-2021
#
# Distributed under the terms of the MIT license.
#
import codecs
import os
import sys
import pywikibot
from pywikibot.version import get_toolforge_hostname, getversion
class DummyModule:
"""Fake module instance."""
__version__ = 'n/a'
try:
import setuptools
except ImportError:
setuptools = DummyModule()
try:
import mwparserfromhell
except ImportError:
mwparserfromhell = DummyModule()
try:
import wikitextparser
except ImportError:
wikitextparser = DummyModule()
try:
import requests
except ImportError:
requests = DummyModule()
WMF_CACERT = 'MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs'
def main(*args: str) -> None:
"""Print pywikibot version and important settings."""
pywikibot.output('Pywikibot: ' + getversion())
pywikibot.output('Release version: ' + pywikibot.__version__)
pywikibot.output('setuptools version: ' + setuptools.__version__)
pywikibot.output('mwparserfromhell version: '
+ mwparserfromhell.__version__)
pywikibot.output('wikitextparser version: ' + wikitextparser.__version__)
pywikibot.output('requests version: ' + requests.__version__)
has_wikimedia_cert = False
if (not hasattr(requests, 'certs')
or not hasattr(requests.certs, 'where')
or not callable(requests.certs.where)):
pywikibot.output(' cacerts: not defined')
elif not os.path.isfile(requests.certs.where()):
pywikibot.output(' cacerts: {} (missing)'.format(
requests.certs.where()))
else:
pywikibot.output(' cacerts: ' + requests.certs.where())
with codecs.open(requests.certs.where(), 'r', 'utf-8') as cert_file:
text = cert_file.read()
if WMF_CACERT in text:
has_wikimedia_cert = True
pywikibot.output(' certificate test: {}'
.format('ok' if has_wikimedia_cert else 'not ok'))
if not has_wikimedia_cert:
pywikibot.output(' Please reinstall requests!')
pywikibot.output('Python: ' + sys.version)
toolforge_env_hostname = get_toolforge_hostname()
if toolforge_env_hostname:
pywikibot.output('Toolforge hostname: ' + toolforge_env_hostname)
# check environment settings
settings = {key for key in os.environ if key.startswith('PYWIKIBOT')}
settings.update(['PYWIKIBOT_DIR', 'PYWIKIBOT_DIR_PWB',
'PYWIKIBOT_NO_USER_CONFIG'])
for environ_name in sorted(settings):
pywikibot.output( | if not usernames:
continue
pywikibot.output('Usernames for family {!r}:'.format(family))
for lang, username in usernames.items():
pywikibot.output('\t{}: {}'.format(lang, username))
if __name__ == '__main__':
main() | '{}: {}'.format(environ_name,
os.environ.get(environ_name, 'Not set') or "''"))
pywikibot.output('Config base dir: ' + pywikibot.config.base_dir)
for family, usernames in pywikibot.config.usernames.items(): | random_line_split |