Dataset schema (field: type, length or value range):

repo: string, 8–50 characters
commit: string, 40 characters
path: string, 5–171 characters
lang: string, 5 distinct values
license: string, 13 distinct values
message: string, 21–1.33k characters
old_code: string, 15–2.4k characters
new_code: string, 140–2.61k characters
n_added: int64, 0–81
n_removed: int64, 0–58
n_hunks: int64, 1–8
change_kind: string, 3 distinct values
udiff: string, 88–3.33k characters
udiff-h: string, 85–3.32k characters
udiff-l: string, 95–3.57k characters
search-replace: string, 89–3.36k characters
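Each record appears to pair the full contents of one file before and after a single commit (old_code, new_code) with the commit metadata and several encodings of the same edit: a unified diff in three variants (udiff, udiff-h, udiff-l) and a search/replace form. Below is a minimal sketch of how a dataset with this schema might be loaded and inspected, assuming it is published on the Hugging Face Hub; the identifier "your-org/code-edits" is a placeholder, not the actual dataset name.

```python
# Minimal sketch, not the canonical loader for this dataset.
# Assumption: the data is hosted on the Hugging Face Hub; "your-org/code-edits"
# is a placeholder identifier, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("your-org/code-edits", split="train")

for row in ds.select(range(3)):
    # Commit metadata
    print(row["repo"], row["commit"][:8], row["path"], row["lang"], row["license"])
    print(row["message"])
    # Size and classification of the change
    print(f'{row["n_added"]} added / {row["n_removed"]} removed '
          f'in {row["n_hunks"]} hunk(s), kind={row["change_kind"]}')
    # The same edit in two of its encodings (note the hyphenated column names)
    print(row["udiff"][:200])
    print(row["search-replace"][:200])
```

Since old_code and new_code carry the complete before/after file contents, any of the diff columns can be cross-checked against them with standard tooling. Sample records follow.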
stdlib-js/www
49a8a954c050c5e9efd7eae6830ddbc14d67248f
tools/scripts/ts_docs_path.js
javascript
apache-2.0
Update path to include version
/** * @license Apache-2.0 * * Copyright (c) 2019 The Stdlib Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; // MODULES // var join = require( 'path' ).join; var root = require( './root.js' ); // MAIN // /** * Returns the TypeScript documentation path. * * @private * @returns {string} TypeScript documentation path */ function path() { return join( root(), 'public', 'docs', 'ts', 'modules' ); } // EXPORTS // module.exports = path;
/** * @license Apache-2.0 * * Copyright (c) 2019 The Stdlib Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; // MODULES // var join = require( 'path' ).join; var root = require( './root.js' ); var version = require( './stdlib_version.js' ); // MAIN // /** * Returns the TypeScript documentation path. * * @private * @returns {string} TypeScript documentation path */ function path() { return join( root(), 'public', 'docs', 'ts', version(), 'modules' ); } // EXPORTS // module.exports = path;
2
1
2
mixed
--- a/tools/scripts/ts_docs_path.js +++ b/tools/scripts/ts_docs_path.js @@ -24,2 +24,3 @@ var root = require( './root.js' ); +var version = require( './stdlib_version.js' ); @@ -35,3 +36,3 @@ function path() { - return join( root(), 'public', 'docs', 'ts', 'modules' ); + return join( root(), 'public', 'docs', 'ts', version(), 'modules' ); }
--- a/tools/scripts/ts_docs_path.js +++ b/tools/scripts/ts_docs_path.js @@ ... @@ var root = require( './root.js' ); +var version = require( './stdlib_version.js' ); @@ ... @@ function path() { - return join( root(), 'public', 'docs', 'ts', 'modules' ); + return join( root(), 'public', 'docs', 'ts', version(), 'modules' ); }
--- a/tools/scripts/ts_docs_path.js +++ b/tools/scripts/ts_docs_path.js @@ -24,2 +24,3 @@ CON var root = require( './root.js' ); ADD var version = require( './stdlib_version.js' ); CON @@ -35,3 +36,3 @@ CON function path() { DEL return join( root(), 'public', 'docs', 'ts', 'modules' ); ADD return join( root(), 'public', 'docs', 'ts', version(), 'modules' ); CON }
<<<<<<< SEARCH var join = require( 'path' ).join; var root = require( './root.js' ); ======= var join = require( 'path' ).join; var root = require( './root.js' ); var version = require( './stdlib_version.js' ); >>>>>>> REPLACE <<<<<<< SEARCH */ function path() { return join( root(), 'public', 'docs', 'ts', 'modules' ); } ======= */ function path() { return join( root(), 'public', 'docs', 'ts', version(), 'modules' ); } >>>>>>> REPLACE
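The search-replace column at the end of each record (for example, the one just above) encodes the edit as conflict-marker style SEARCH/REPLACE blocks. The following is a rough sketch of applying such blocks to old_code in order to reproduce new_code; it assumes the stored values keep their original line breaks (they are rendered flattened here) and that every SEARCH text occurs verbatim in the file, which real records may not always guarantee.

```python
import re

# Matches one "<<<<<<< SEARCH ... ======= ... >>>>>>> REPLACE" block,
# assuming the three markers sit on their own lines in the stored value.
_BLOCK = re.compile(
    r"<<<<<<< SEARCH\n(.*?)\n=======\n(.*?)\n>>>>>>> REPLACE",
    re.DOTALL,
)

def apply_search_replace(old_code: str, blocks: str) -> str:
    """Apply each SEARCH/REPLACE block as a single literal substitution."""
    new_code = old_code
    for search, replace in _BLOCK.findall(blocks):
        new_code = new_code.replace(search, replace, 1)
    return new_code

# Hypothetical usage with a record loaded as in the previous sketch:
# assert apply_search_replace(row["old_code"], row["search-replace"]) == row["new_code"]
```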
maitaoriana/openacademy-project
e7f1c49216a7b609dbe8ea283cdf689cfa575f85
openacademy/model/openacademy_course.py
python
apache-2.0
[REF] openacademy: Modify copy method into inherit
from openerp import models, fields, api ''' This module create model of Course ''' class Course(models.Model): ''' This class create model of Course ''' _name = 'openacademy.course' # Model odoo name name = fields.Char(string='Title', required=True) description = fields.Text(string='Description') responsible_id = fields.Many2one('res.users', ondelete = 'set null', string = "Responsible", index = True) session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions") _sql_constraints = [ ('name_description_check', 'CHECK(name != description)', "The title of the course should not be the description"), ('name_unique', 'UNIQUE(name)', "The course title must be unique"), ]
from openerp import api, models, fields, api ''' This module create model of Course ''' class Course(models.Model): ''' This class create model of Course ''' _name = 'openacademy.course' # Model odoo name name = fields.Char(string='Title', required=True) description = fields.Text(string='Description') responsible_id = fields.Many2one('res.users', ondelete = 'set null', string = "Responsible", index = True) session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions") _sql_constraints = [ ('name_description_check', 'CHECK(name != description)', "The title of the course should not be the description"), ('name_unique', 'UNIQUE(name)', "The course title must be unique"), ] @api.multi def copy(self, default=None): default = dict(default or {}) copied_count = self.search_count( [('name', '=like', u"Copy of {}%".format(self.name))]) if not copied_count: new_name = u"Copy of {}".format(self.name) else: new_name = u"Copy of {} ({})".format(self.name, copied_count) default['name'] = new_name return super(Course, self).copy(default)
15
1
2
mixed
--- a/openacademy/model/openacademy_course.py +++ b/openacademy/model/openacademy_course.py @@ -1,2 +1,2 @@ -from openerp import models, fields, api +from openerp import api, models, fields, api @@ -32 +32,15 @@ ] + + @api.multi + def copy(self, default=None): + default = dict(default or {}) + + copied_count = self.search_count( + [('name', '=like', u"Copy of {}%".format(self.name))]) + if not copied_count: + new_name = u"Copy of {}".format(self.name) + else: + new_name = u"Copy of {} ({})".format(self.name, copied_count) + + default['name'] = new_name + return super(Course, self).copy(default)
--- a/openacademy/model/openacademy_course.py +++ b/openacademy/model/openacademy_course.py @@ ... @@ -from openerp import models, fields, api +from openerp import api, models, fields, api @@ ... @@ ] + + @api.multi + def copy(self, default=None): + default = dict(default or {}) + + copied_count = self.search_count( + [('name', '=like', u"Copy of {}%".format(self.name))]) + if not copied_count: + new_name = u"Copy of {}".format(self.name) + else: + new_name = u"Copy of {} ({})".format(self.name, copied_count) + + default['name'] = new_name + return super(Course, self).copy(default)
--- a/openacademy/model/openacademy_course.py +++ b/openacademy/model/openacademy_course.py @@ -1,2 +1,2 @@ DEL from openerp import models, fields, api ADD from openerp import api, models, fields, api CON @@ -32 +32,15 @@ CON ] ADD ADD @api.multi ADD def copy(self, default=None): ADD default = dict(default or {}) ADD ADD copied_count = self.search_count( ADD [('name', '=like', u"Copy of {}%".format(self.name))]) ADD if not copied_count: ADD new_name = u"Copy of {}".format(self.name) ADD else: ADD new_name = u"Copy of {} ({})".format(self.name, copied_count) ADD ADD default['name'] = new_name ADD return super(Course, self).copy(default)
<<<<<<< SEARCH from openerp import models, fields, api ''' ======= from openerp import api, models, fields, api ''' >>>>>>> REPLACE <<<<<<< SEARCH "The course title must be unique"), ] ======= "The course title must be unique"), ] @api.multi def copy(self, default=None): default = dict(default or {}) copied_count = self.search_count( [('name', '=like', u"Copy of {}%".format(self.name))]) if not copied_count: new_name = u"Copy of {}".format(self.name) else: new_name = u"Copy of {} ({})".format(self.name, copied_count) default['name'] = new_name return super(Course, self).copy(default) >>>>>>> REPLACE
jenkinsci/startup-trigger-plugin-plugin
876ce161aa78a16adb3e5409d8531c8cb396b001
src/main/java/org/jvnet/hudson/plugins/triggers/startup/HudsonStartupService.java
java
mit
Enable to specify a string for the trigger label as a substring of the node label. For example a node label with "WinXP Acceptance London" and 'Restricted node Label = London'
package org.jvnet.hudson.plugins.triggers.startup; import hudson.model.Node; import java.io.Serializable; /** * @author Gregory Boissinot */ public class HudsonStartupService implements Serializable { public boolean has2Schedule(HudsonStartupTrigger startupTrigger, Node jobNode) { if (startupTrigger == null) { throw new NullPointerException("A startupTrigger object has to be set."); } if (jobNode == null) { throw new NullPointerException("A node object has to be set."); } String triggerLabel = startupTrigger.getLabel(); return has2Schedule(triggerLabel, jobNode); } private boolean has2Schedule(String triggerLabel, Node jobNode) { String jobNodeName = jobNode.getNodeName(); if (triggerLabel == null) { //Jobs on master has to schedule return isMaster(jobNodeName); } if (triggerLabel.equalsIgnoreCase("master")) { //User set 'master' string, Jobs on master has to schedule return isMaster(jobNodeName); } if (triggerLabel.equalsIgnoreCase(jobNodeName)) { //Match exactly node name return true; } return triggerLabel.equalsIgnoreCase(jobNode.getLabelString()); //Match node label } private boolean isMaster(String nodeName) { //Master node name is "", slave node name is never empty return nodeName.equals(""); } }
package org.jvnet.hudson.plugins.triggers.startup; import hudson.model.Node; import java.io.Serializable; /** * @author Gregory Boissinot */ public class HudsonStartupService implements Serializable { public boolean has2Schedule(HudsonStartupTrigger startupTrigger, Node jobNode) { if (startupTrigger == null) { throw new NullPointerException("A startupTrigger object has to be set."); } if (jobNode == null) { throw new NullPointerException("A node object has to be set."); } String triggerLabel = startupTrigger.getLabel(); return has2Schedule(triggerLabel, jobNode); } private boolean has2Schedule(String triggerLabel, Node jobNode) { String jobNodeName = jobNode.getNodeName(); if (triggerLabel == null) { //Jobs on master has to schedule return isMaster(jobNodeName); } if (triggerLabel.equalsIgnoreCase("master")) { //User set 'master' string, Jobs on master has to schedule return isMaster(jobNodeName); } if (triggerLabel.equalsIgnoreCase(jobNodeName)) { //Match exactly node name return true; } String labelString = jobNode.getLabelString(); if (labelString == null) { return false; } if (triggerLabel.equalsIgnoreCase(labelString)) { //Match node label return true; } return labelString.contains(triggerLabel); } private boolean isMaster(String nodeName) { //Master node name is "", slave node name is never empty return nodeName.equals(""); } }
10
1
1
mixed
--- a/src/main/java/org/jvnet/hudson/plugins/triggers/startup/HudsonStartupService.java +++ b/src/main/java/org/jvnet/hudson/plugins/triggers/startup/HudsonStartupService.java @@ -41,3 +41,12 @@ - return triggerLabel.equalsIgnoreCase(jobNode.getLabelString()); //Match node label + String labelString = jobNode.getLabelString(); + if (labelString == null) { + return false; + } + + if (triggerLabel.equalsIgnoreCase(labelString)) { //Match node label + return true; + } + + return labelString.contains(triggerLabel); }
--- a/src/main/java/org/jvnet/hudson/plugins/triggers/startup/HudsonStartupService.java +++ b/src/main/java/org/jvnet/hudson/plugins/triggers/startup/HudsonStartupService.java @@ ... @@ - return triggerLabel.equalsIgnoreCase(jobNode.getLabelString()); //Match node label + String labelString = jobNode.getLabelString(); + if (labelString == null) { + return false; + } + + if (triggerLabel.equalsIgnoreCase(labelString)) { //Match node label + return true; + } + + return labelString.contains(triggerLabel); }
--- a/src/main/java/org/jvnet/hudson/plugins/triggers/startup/HudsonStartupService.java +++ b/src/main/java/org/jvnet/hudson/plugins/triggers/startup/HudsonStartupService.java @@ -41,3 +41,12 @@ CON DEL return triggerLabel.equalsIgnoreCase(jobNode.getLabelString()); //Match node label ADD String labelString = jobNode.getLabelString(); ADD if (labelString == null) { ADD return false; ADD } ADD ADD if (triggerLabel.equalsIgnoreCase(labelString)) { //Match node label ADD return true; ADD } ADD ADD return labelString.contains(triggerLabel); CON }
<<<<<<< SEARCH } return triggerLabel.equalsIgnoreCase(jobNode.getLabelString()); //Match node label } ======= } String labelString = jobNode.getLabelString(); if (labelString == null) { return false; } if (triggerLabel.equalsIgnoreCase(labelString)) { //Match node label return true; } return labelString.contains(triggerLabel); } >>>>>>> REPLACE
blindpirate/gradle
5314e08bc9ad0b09950643c097d06a659bb5c125
subprojects/workers/build.gradle.kts
kotlin
apache-2.0
Clarify description of workers project
plugins { id("gradlebuild.distribution.api-java") } description = "Infrastructure for running worker processes" dependencies { implementation(project(":base-services")) implementation(project(":messaging")) implementation(project(":logging")) implementation(project(":process-services")) implementation(project(":worker-processes")) implementation(project(":persistent-cache")) implementation(project(":core-api")) implementation(project(":model-core")) implementation(project(":core")) implementation(project(":snapshots")) implementation(project(":file-collections")) implementation(project(":files")) implementation(project(":native")) implementation(project(":resources")) implementation(libs.slf4jApi) implementation(libs.guava) implementation(libs.inject) testImplementation(project(":native")) testImplementation(project(":file-collections")) testImplementation(project(":resources")) testImplementation(project(":snapshots")) testImplementation(testFixtures(project(":core"))) testImplementation(testFixtures(project(":logging"))) integTestRuntimeOnly(project(":kotlin-dsl")) integTestRuntimeOnly(project(":kotlin-dsl-provider-plugins")) integTestRuntimeOnly(project(":api-metadata")) integTestRuntimeOnly(project(":test-kit")) integTestImplementation(project(":jvm-services")) testFixturesImplementation(libs.inject) testFixturesImplementation(libs.groovyJson) testFixturesImplementation(project(":base-services")) testRuntimeOnly(project(":distributions-core")) { because("Tests instantiate DefaultClassLoaderRegistry which requires a 'gradle-plugins.properties' through DefaultPluginModuleRegistry") } integTestDistributionRuntimeOnly(project(":distributions-core")) }
plugins { id("gradlebuild.distribution.api-java") } description = "Infrastructure for starting and managing worker processes" dependencies { implementation(project(":base-services")) implementation(project(":messaging")) implementation(project(":logging")) implementation(project(":process-services")) implementation(project(":worker-processes")) implementation(project(":persistent-cache")) implementation(project(":core-api")) implementation(project(":model-core")) implementation(project(":core")) implementation(project(":snapshots")) implementation(project(":file-collections")) implementation(project(":files")) implementation(project(":native")) implementation(project(":resources")) implementation(libs.slf4jApi) implementation(libs.guava) implementation(libs.inject) testImplementation(project(":native")) testImplementation(project(":file-collections")) testImplementation(project(":resources")) testImplementation(project(":snapshots")) testImplementation(testFixtures(project(":core"))) testImplementation(testFixtures(project(":logging"))) integTestRuntimeOnly(project(":kotlin-dsl")) integTestRuntimeOnly(project(":kotlin-dsl-provider-plugins")) integTestRuntimeOnly(project(":api-metadata")) integTestRuntimeOnly(project(":test-kit")) integTestImplementation(project(":jvm-services")) testFixturesImplementation(libs.inject) testFixturesImplementation(libs.groovyJson) testFixturesImplementation(project(":base-services")) testRuntimeOnly(project(":distributions-core")) { because("Tests instantiate DefaultClassLoaderRegistry which requires a 'gradle-plugins.properties' through DefaultPluginModuleRegistry") } integTestDistributionRuntimeOnly(project(":distributions-core")) }
1
1
1
mixed
--- a/subprojects/workers/build.gradle.kts +++ b/subprojects/workers/build.gradle.kts @@ -4,3 +4,3 @@ -description = "Infrastructure for running worker processes" +description = "Infrastructure for starting and managing worker processes"
--- a/subprojects/workers/build.gradle.kts +++ b/subprojects/workers/build.gradle.kts @@ ... @@ -description = "Infrastructure for running worker processes" +description = "Infrastructure for starting and managing worker processes"
--- a/subprojects/workers/build.gradle.kts +++ b/subprojects/workers/build.gradle.kts @@ -4,3 +4,3 @@ CON DEL description = "Infrastructure for running worker processes" ADD description = "Infrastructure for starting and managing worker processes" CON
<<<<<<< SEARCH } description = "Infrastructure for running worker processes" dependencies { ======= } description = "Infrastructure for starting and managing worker processes" dependencies { >>>>>>> REPLACE
bem-archive/bem-tools
d023e2bd5f88016866f34fd25738c65c822ae45d
lib/techs/v2/css-preprocessor.js
javascript
mit
Add comment about borschik usage
'use strict'; var BORSCHIK_CSS_TECH = require('borschik/lib/techs/css'), Q = require('q'); exports.API_VER = 2; exports.techMixin = { getBuildResultChunk : function(relPath) { return '@import url(' + relPath + ');\n'; }, getBuildSuffixesMap: function() { return { css: ['css'] }; }, processBuildResult : function(res) { var defer = Q.defer(); if (!res) { defer.resolve([]); return defer.promise; } this.compileBuildResult(res, defer); return defer.promise; }, /* stub method, override in your tech's code */ compileBuildResult: function(res, defer) { return defer.resolve([]); }, getBuildResult : function(filteredFiles, destSuffix, output, opts) { return this.__base.apply(this, arguments) .then(function(res) { var tech = new BORSCHIK_CSS_TECH.Tech({ comments : true, freeze : false, minimize : false }), file = new BORSCHIK_CSS_TECH.File(tech, output, 'include'); file.content = file.parse(res.join('')); return this.processBuildResult(file.process(output)); }.bind(this)); } };
'use strict'; var BORSCHIK_CSS_TECH = require('borschik/lib/techs/css'), Q = require('q'); exports.API_VER = 2; exports.techMixin = { getBuildResultChunk : function(relPath) { return '@import url(' + relPath + ');\n'; }, getBuildSuffixesMap: function() { return { css: ['css'] }; }, processBuildResult : function(res) { var defer = Q.defer(); if (!res) { defer.resolve([]); return defer.promise; } this.compileBuildResult(res, defer); return defer.promise; }, /* stub method, override in your tech's code */ compileBuildResult: function(res, defer) { return defer.resolve([]); }, getBuildResult : function(filteredFiles, destSuffix, output, opts) { return this.__base.apply(this, arguments) .then(function(res) { // use borschik here to preserve correct link to the images var tech = new BORSCHIK_CSS_TECH.Tech({ comments : true, freeze : false, minimize : false }), file = new BORSCHIK_CSS_TECH.File(tech, output, 'include'); file.content = file.parse(res.join('')); return this.processBuildResult(file.process(output)); }.bind(this)); } };
1
0
1
add_only
--- a/lib/techs/v2/css-preprocessor.js +++ b/lib/techs/v2/css-preprocessor.js @@ -40,2 +40,3 @@ .then(function(res) { + // use borschik here to preserve correct link to the images var tech = new BORSCHIK_CSS_TECH.Tech({
--- a/lib/techs/v2/css-preprocessor.js +++ b/lib/techs/v2/css-preprocessor.js @@ ... @@ .then(function(res) { + // use borschik here to preserve correct link to the images var tech = new BORSCHIK_CSS_TECH.Tech({
--- a/lib/techs/v2/css-preprocessor.js +++ b/lib/techs/v2/css-preprocessor.js @@ -40,2 +40,3 @@ CON .then(function(res) { ADD // use borschik here to preserve correct link to the images CON var tech = new BORSCHIK_CSS_TECH.Tech({
<<<<<<< SEARCH return this.__base.apply(this, arguments) .then(function(res) { var tech = new BORSCHIK_CSS_TECH.Tech({ comments : true, ======= return this.__base.apply(this, arguments) .then(function(res) { // use borschik here to preserve correct link to the images var tech = new BORSCHIK_CSS_TECH.Tech({ comments : true, >>>>>>> REPLACE
yunity/foodsaving-backend
a15d2956cfd48e0d46d5d4cf567af05641b4c8e6
yunity/api/utils.py
python
agpl-3.0
Implement JSON request validation decorator with @NerdyProjects
from django.http import JsonResponse class ApiBase(object): @classmethod def success(cls, data, status=200): """ :type data: dict :type status: int :rtype JsonResponse """ return JsonResponse(data, status=status) @classmethod def error(cls, error, status=400): """ :type error: str :type status: int :rtype JsonResponse """ return JsonResponse({'error': error}, status=status)
from functools import wraps from json import loads as load_json from django.http import JsonResponse class ApiBase(object): @classmethod def validation_failure(cls, message, status=400): """ :type message: str :type status: int :rtype JsonResponse """ return JsonResponse({'validation_failure': message}, status=status) @classmethod def success(cls, data, status=200): """ :type data: dict :type status: int :rtype JsonResponse """ return JsonResponse(data, status=status) @classmethod def error(cls, error, status=400): """ :type error: str :type status: int :rtype JsonResponse """ return JsonResponse({'error': error}, status=status) def json_request(expected_keys=None): """Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object. """ expected_keys = expected_keys or [] def decorator(func): @wraps(func) def wrapper(cls, request, *args, **kwargs): data = load_json(request.body.decode('utf8')) for expected_key in expected_keys: value = data.get(expected_key) if not value: return ApiBase.validation_failure('missing key: {}'.format(expected_key)) return func(cls, data, request, *args, **kwargs) return wrapper return decorator
35
0
3
add_only
--- a/yunity/api/utils.py +++ b/yunity/api/utils.py @@ -1 +1,4 @@ +from functools import wraps +from json import loads as load_json + from django.http import JsonResponse @@ -4,2 +7,12 @@ class ApiBase(object): + @classmethod + def validation_failure(cls, message, status=400): + """ + :type message: str + :type status: int + :rtype JsonResponse + + """ + return JsonResponse({'validation_failure': message}, status=status) + @classmethod @@ -23 +36,23 @@ return JsonResponse({'error': error}, status=status) + + +def json_request(expected_keys=None): + """Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object. + + """ + expected_keys = expected_keys or [] + + def decorator(func): + @wraps(func) + def wrapper(cls, request, *args, **kwargs): + data = load_json(request.body.decode('utf8')) + + for expected_key in expected_keys: + value = data.get(expected_key) + if not value: + return ApiBase.validation_failure('missing key: {}'.format(expected_key)) + + return func(cls, data, request, *args, **kwargs) + + return wrapper + return decorator
--- a/yunity/api/utils.py +++ b/yunity/api/utils.py @@ ... @@ +from functools import wraps +from json import loads as load_json + from django.http import JsonResponse @@ ... @@ class ApiBase(object): + @classmethod + def validation_failure(cls, message, status=400): + """ + :type message: str + :type status: int + :rtype JsonResponse + + """ + return JsonResponse({'validation_failure': message}, status=status) + @classmethod @@ ... @@ return JsonResponse({'error': error}, status=status) + + +def json_request(expected_keys=None): + """Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object. + + """ + expected_keys = expected_keys or [] + + def decorator(func): + @wraps(func) + def wrapper(cls, request, *args, **kwargs): + data = load_json(request.body.decode('utf8')) + + for expected_key in expected_keys: + value = data.get(expected_key) + if not value: + return ApiBase.validation_failure('missing key: {}'.format(expected_key)) + + return func(cls, data, request, *args, **kwargs) + + return wrapper + return decorator
--- a/yunity/api/utils.py +++ b/yunity/api/utils.py @@ -1 +1,4 @@ ADD from functools import wraps ADD from json import loads as load_json ADD CON from django.http import JsonResponse @@ -4,2 +7,12 @@ CON class ApiBase(object): ADD @classmethod ADD def validation_failure(cls, message, status=400): ADD """ ADD :type message: str ADD :type status: int ADD :rtype JsonResponse ADD ADD """ ADD return JsonResponse({'validation_failure': message}, status=status) ADD CON @classmethod @@ -23 +36,23 @@ CON return JsonResponse({'error': error}, status=status) ADD ADD ADD def json_request(expected_keys=None): ADD """Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object. ADD ADD """ ADD expected_keys = expected_keys or [] ADD ADD def decorator(func): ADD @wraps(func) ADD def wrapper(cls, request, *args, **kwargs): ADD data = load_json(request.body.decode('utf8')) ADD ADD for expected_key in expected_keys: ADD value = data.get(expected_key) ADD if not value: ADD return ApiBase.validation_failure('missing key: {}'.format(expected_key)) ADD ADD return func(cls, data, request, *args, **kwargs) ADD ADD return wrapper ADD return decorator
<<<<<<< SEARCH from django.http import JsonResponse class ApiBase(object): @classmethod def success(cls, data, status=200): ======= from functools import wraps from json import loads as load_json from django.http import JsonResponse class ApiBase(object): @classmethod def validation_failure(cls, message, status=400): """ :type message: str :type status: int :rtype JsonResponse """ return JsonResponse({'validation_failure': message}, status=status) @classmethod def success(cls, data, status=200): >>>>>>> REPLACE <<<<<<< SEARCH """ return JsonResponse({'error': error}, status=status) ======= """ return JsonResponse({'error': error}, status=status) def json_request(expected_keys=None): """Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object. """ expected_keys = expected_keys or [] def decorator(func): @wraps(func) def wrapper(cls, request, *args, **kwargs): data = load_json(request.body.decode('utf8')) for expected_key in expected_keys: value = data.get(expected_key) if not value: return ApiBase.validation_failure('missing key: {}'.format(expected_key)) return func(cls, data, request, *args, **kwargs) return wrapper return decorator >>>>>>> REPLACE
ZeroPage/i_have_no_apps
10d170fa4b12345c2450bb3d83f92c2abc0eab3c
app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java
java
mit
Add Helper singleton class for querying/adding notification to db.
package org.zeropage.apps.zeropage.database.notification; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import org.zeropage.apps.zeropage.notification.Notification; import java.util.ArrayList; import java.util.List; public class NotificationHistory { private static NotificationHistory sInstance; private SQLiteDatabase mDatabase; public NotificationHistory getInstance(Context context) { sInstance = new Notifi } private NotificationHistory(Context context) { mDatabase = new NotificationOpenHelper(context).getWritableDatabase(); } public void addToHistory(Notification newNotification) { ContentValues values = NotificationTable.getContentValues(newNotification); mDatabase.insert(NotificationTable.NAME, null, values); } public List<Notification> getAllHistory() { List<Notification> notifications = new ArrayList<>(); try (NotificationCursorWrapper wrapper = queryHistory(null, null)) { wrapper.moveToFirst(); while (!wrapper.isAfterLast()) { notifications.add(wrapper.getNotification()); wrapper.moveToNext(); } } return notifications; } private NotificationCursorWrapper queryHistory(String whereClause, String[] whereArgs) { Cursor cursor = mDatabase.query(NotificationTable.NAME, null, whereClause, whereArgs, null, null, null, null); return new NotificationCursorWrapper(cursor); } }
package org.zeropage.apps.zeropage.database.notification; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import org.zeropage.apps.zeropage.notification.Notification; import java.util.ArrayList; import java.util.List; public class NotificationHistory { private static NotificationHistory sInstance; private SQLiteDatabase mDatabase; public synchronized static NotificationHistory getInstance(Context context) { if (sInstance == null) { sInstance = new NotificationHistory(context.getApplicationContext()); } return sInstance; } private NotificationHistory(Context context) { mDatabase = new NotificationOpenHelper(context).getWritableDatabase(); } public void addToHistory(Notification newNotification) { ContentValues values = NotificationTable.getContentValues(newNotification); mDatabase.insert(NotificationTable.NAME, null, values); } public List<Notification> getAllHistory() { List<Notification> notifications = new ArrayList<>(); try (NotificationCursorWrapper wrapper = queryHistory(null, null)) { wrapper.moveToFirst(); while (!wrapper.isAfterLast()) { notifications.add(wrapper.getNotification()); wrapper.moveToNext(); } } return notifications; } private NotificationCursorWrapper queryHistory(String whereClause, String[] whereArgs) { Cursor cursor = mDatabase.query(NotificationTable.NAME, null, whereClause, whereArgs, null, null, null, null); return new NotificationCursorWrapper(cursor); } }
6
2
1
mixed
--- a/app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java +++ b/app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java @@ -16,4 +16,8 @@ - public NotificationHistory getInstance(Context context) { - sInstance = new Notifi + public synchronized static NotificationHistory getInstance(Context context) { + if (sInstance == null) { + sInstance = new NotificationHistory(context.getApplicationContext()); + } + + return sInstance; }
--- a/app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java +++ b/app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java @@ ... @@ - public NotificationHistory getInstance(Context context) { - sInstance = new Notifi + public synchronized static NotificationHistory getInstance(Context context) { + if (sInstance == null) { + sInstance = new NotificationHistory(context.getApplicationContext()); + } + + return sInstance; }
--- a/app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java +++ b/app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java @@ -16,4 +16,8 @@ CON DEL public NotificationHistory getInstance(Context context) { DEL sInstance = new Notifi ADD public synchronized static NotificationHistory getInstance(Context context) { ADD if (sInstance == null) { ADD sInstance = new NotificationHistory(context.getApplicationContext()); ADD } ADD ADD return sInstance; CON }
<<<<<<< SEARCH private SQLiteDatabase mDatabase; public NotificationHistory getInstance(Context context) { sInstance = new Notifi } ======= private SQLiteDatabase mDatabase; public synchronized static NotificationHistory getInstance(Context context) { if (sInstance == null) { sInstance = new NotificationHistory(context.getApplicationContext()); } return sInstance; } >>>>>>> REPLACE
m039/beacon-keeper
3617ea84c0920554ce4debdf0278e52b116df542
library/src/main/java/com/m039/estimoto/util/EstimotoServiceUtil.java
java
apache-2.0
Add hack to stop service
/** EstimotoServiceUtil.java --- * * Copyright (C) 2014 Dmitry Mozgin * * Author: Dmitry Mozgin <[email protected]> * * */ package com.m039.estimoto.util; import android.content.Context; import android.content.Intent; import com.m039.estimoto.service.EstimotoService; /** * * * Created: 03/22/14 * * @author Dmitry Mozgin * @version * @since */ public class EstimotoServiceUtil { public static void turnOn(Context ctx) { Intent intent = newEstimotoIntent(ctx, EstimotoService.ACTION_CONTROL); intent.putExtra(EstimotoService.EXTRA_TURN_ON, true); ctx.startService(intent); } public static void turnOff(Context ctx) { Intent intent = newEstimotoIntent(ctx, EstimotoService.ACTION_CONTROL); intent.putExtra(EstimotoService.EXTRA_TURN_OFF, true); ctx.startService(intent); } private static Intent newEstimotoIntent(Context ctx, String action) { Intent intent = new Intent(ctx, EstimotoService.class); intent.setAction(action); return intent; } } // EstimotoServiceUtil
/** EstimotoServiceUtil.java --- * * Copyright (C) 2014 Dmitry Mozgin * * Author: Dmitry Mozgin <[email protected]> * * */ package com.m039.estimoto.util; import android.content.Context; import android.content.Intent; import com.m039.estimoto.service.EstimotoService; /** * * * Created: 03/22/14 * * @author Dmitry Mozgin * @version * @since */ public class EstimotoServiceUtil { public static void turnOn(Context ctx) { Intent intent = newEstimotoIntent(ctx, EstimotoService.ACTION_CONTROL); intent.putExtra(EstimotoService.EXTRA_TURN_ON, true); ctx.startService(intent); } public static void turnOff(Context ctx) { Intent intent = newEstimotoIntent(ctx, EstimotoService.ACTION_CONTROL); intent.putExtra(EstimotoService.EXTRA_TURN_OFF, true); ctx.startService(intent); ctx.stopService(intent); // hack } private static Intent newEstimotoIntent(Context ctx, String action) { Intent intent = new Intent(ctx, EstimotoService.class); intent.setAction(action); return intent; } } // EstimotoServiceUtil
1
0
1
add_only
--- a/library/src/main/java/com/m039/estimoto/util/EstimotoServiceUtil.java +++ b/library/src/main/java/com/m039/estimoto/util/EstimotoServiceUtil.java @@ -37,2 +37,3 @@ ctx.startService(intent); + ctx.stopService(intent); // hack }
--- a/library/src/main/java/com/m039/estimoto/util/EstimotoServiceUtil.java +++ b/library/src/main/java/com/m039/estimoto/util/EstimotoServiceUtil.java @@ ... @@ ctx.startService(intent); + ctx.stopService(intent); // hack }
--- a/library/src/main/java/com/m039/estimoto/util/EstimotoServiceUtil.java +++ b/library/src/main/java/com/m039/estimoto/util/EstimotoServiceUtil.java @@ -37,2 +37,3 @@ CON ctx.startService(intent); ADD ctx.stopService(intent); // hack CON }
<<<<<<< SEARCH intent.putExtra(EstimotoService.EXTRA_TURN_OFF, true); ctx.startService(intent); } ======= intent.putExtra(EstimotoService.EXTRA_TURN_OFF, true); ctx.startService(intent); ctx.stopService(intent); // hack } >>>>>>> REPLACE
AcornUI/Acorn
a5684668d9bba46bb442aaaa06594c4f8267e0f1
tools/acornui-texture-packer/build.gradle.kts
kotlin
apache-2.0
Add lwjgl backend dependency for texture-packer
plugins { kotlin("multiplatform") `maven-publish` } val KOTLIN_LANGUAGE_VERSION: String by extra val KOTLIN_JVM_TARGET: String by extra kotlin { js { compilations.named("test") { runtimeDependencyFiles } compilations.all { kotlinOptions { moduleKind = "amd" sourceMap = true sourceMapEmbedSources = "always" main = "noCall" } } } jvm { compilations.all { kotlinOptions { jvmTarget = KOTLIN_JVM_TARGET } } } targets.all { compilations.all { kotlinOptions { languageVersion = KOTLIN_LANGUAGE_VERSION apiVersion = KOTLIN_LANGUAGE_VERSION verbose = true } } } sourceSets { commonMain { dependencies { implementation(kotlin("stdlib-common")) implementation(project(":acornui-core")) implementation(project(":acornui-utils")) } } commonTest { dependencies { implementation(kotlin("test-common")) implementation(kotlin("test-annotations-common")) } } named("jvmMain") { dependencies { implementation(kotlin("stdlib-jdk8")) } } named("jvmTest") { dependencies { implementation(kotlin("test")) implementation(kotlin("test-junit")) } } named("jsMain") { dependencies { implementation(kotlin("stdlib-js")) } } named("jsTest") { dependencies { implementation(kotlin("test-js")) } } } }
plugins { kotlin("multiplatform") `maven-publish` } val KOTLIN_LANGUAGE_VERSION: String by extra val KOTLIN_JVM_TARGET: String by extra kotlin { js { compilations.named("test") { runtimeDependencyFiles } compilations.all { kotlinOptions { moduleKind = "amd" sourceMap = true sourceMapEmbedSources = "always" main = "noCall" } } } jvm { compilations.all { kotlinOptions { jvmTarget = KOTLIN_JVM_TARGET } } } targets.all { compilations.all { kotlinOptions { languageVersion = KOTLIN_LANGUAGE_VERSION apiVersion = KOTLIN_LANGUAGE_VERSION verbose = true } } } sourceSets { commonMain { dependencies { implementation(kotlin("stdlib-common")) implementation(project(":acornui-core")) implementation(project(":acornui-utils")) implementation(project(":backends:acornui-lwjgl-backend")) } } commonTest { dependencies { implementation(kotlin("test-common")) implementation(kotlin("test-annotations-common")) } } named("jvmMain") { dependencies { implementation(kotlin("stdlib-jdk8")) } } named("jvmTest") { dependencies { implementation(kotlin("test")) implementation(kotlin("test-junit")) } } named("jsMain") { dependencies { implementation(kotlin("stdlib-js")) } } named("jsTest") { dependencies { implementation(kotlin("test-js")) } } } }
1
0
1
add_only
--- a/tools/acornui-texture-packer/build.gradle.kts +++ b/tools/acornui-texture-packer/build.gradle.kts @@ -44,2 +44,3 @@ implementation(project(":acornui-utils")) + implementation(project(":backends:acornui-lwjgl-backend")) }
--- a/tools/acornui-texture-packer/build.gradle.kts +++ b/tools/acornui-texture-packer/build.gradle.kts @@ ... @@ implementation(project(":acornui-utils")) + implementation(project(":backends:acornui-lwjgl-backend")) }
--- a/tools/acornui-texture-packer/build.gradle.kts +++ b/tools/acornui-texture-packer/build.gradle.kts @@ -44,2 +44,3 @@ CON implementation(project(":acornui-utils")) ADD implementation(project(":backends:acornui-lwjgl-backend")) CON }
<<<<<<< SEARCH implementation(project(":acornui-core")) implementation(project(":acornui-utils")) } } ======= implementation(project(":acornui-core")) implementation(project(":acornui-utils")) implementation(project(":backends:acornui-lwjgl-backend")) } } >>>>>>> REPLACE
bjwbell/servo
ab0d62002516562829151ec207c1c5bb9a7d6d6d
src/components/main/css/select.rs
rust
mpl-2.0
Remove the 'border-width: 0' hack. This was to work-around a netsurfcss quirk.
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use extra::url::Url; use style::Stylesheet; use style::Stylist; use style::selector_matching::UserAgentOrigin; pub fn new_stylist() -> Stylist { let mut stylist = Stylist::new(); stylist.add_stylesheet(html4_default_style(), UserAgentOrigin); stylist.add_stylesheet(servo_default_style(), UserAgentOrigin); stylist } fn html4_default_style() -> Stylesheet { Stylesheet::from_str(html4_default_style_str()) } fn servo_default_style() -> Stylesheet { Stylesheet::from_str(servo_default_style_str()) } fn default_url(name: &str) -> Url { FromStr::from_str(fmt!("http://%s", name)).unwrap() } fn html4_default_style_str() -> &'static str { include_str!("user-agent.css") } // FIXME: this shouldn’t be needed. // The initial value of border-*-width is 'medium' (for which 2px is ok.) // It’s the *computed values* that is set to 0 when the corresponding // border-*-style is 'none' (the initial value) or 'hidden'. // This should be taken care of when removing libcss. fn servo_default_style_str() -> &'static str { // libcss want's this to default to 2px.. "* { border-width: 0px; }" }
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use style::Stylesheet; use style::Stylist; use style::selector_matching::UserAgentOrigin; pub fn new_stylist() -> Stylist { let mut stylist = Stylist::new(); let ua_stylesheet = Stylesheet::from_str(include_str!("user-agent.css")); stylist.add_stylesheet(ua_stylesheet, UserAgentOrigin); stylist }
2
30
2
mixed
--- a/src/components/main/css/select.rs +++ b/src/components/main/css/select.rs @@ -4,3 +4,2 @@ -use extra::url::Url; use style::Stylesheet; @@ -11,32 +10,5 @@ let mut stylist = Stylist::new(); - stylist.add_stylesheet(html4_default_style(), UserAgentOrigin); - stylist.add_stylesheet(servo_default_style(), UserAgentOrigin); + let ua_stylesheet = Stylesheet::from_str(include_str!("user-agent.css")); + stylist.add_stylesheet(ua_stylesheet, UserAgentOrigin); stylist } - -fn html4_default_style() -> Stylesheet { - Stylesheet::from_str(html4_default_style_str()) -} - -fn servo_default_style() -> Stylesheet { - Stylesheet::from_str(servo_default_style_str()) -} - -fn default_url(name: &str) -> Url { - FromStr::from_str(fmt!("http://%s", name)).unwrap() -} - -fn html4_default_style_str() -> &'static str { - include_str!("user-agent.css") -} - - -// FIXME: this shouldn’t be needed. -// The initial value of border-*-width is 'medium' (for which 2px is ok.) -// It’s the *computed values* that is set to 0 when the corresponding -// border-*-style is 'none' (the initial value) or 'hidden'. -// This should be taken care of when removing libcss. -fn servo_default_style_str() -> &'static str { - // libcss want's this to default to 2px.. - "* { border-width: 0px; }" -}
--- a/src/components/main/css/select.rs +++ b/src/components/main/css/select.rs @@ ... @@ -use extra::url::Url; use style::Stylesheet; @@ ... @@ let mut stylist = Stylist::new(); - stylist.add_stylesheet(html4_default_style(), UserAgentOrigin); - stylist.add_stylesheet(servo_default_style(), UserAgentOrigin); + let ua_stylesheet = Stylesheet::from_str(include_str!("user-agent.css")); + stylist.add_stylesheet(ua_stylesheet, UserAgentOrigin); stylist } - -fn html4_default_style() -> Stylesheet { - Stylesheet::from_str(html4_default_style_str()) -} - -fn servo_default_style() -> Stylesheet { - Stylesheet::from_str(servo_default_style_str()) -} - -fn default_url(name: &str) -> Url { - FromStr::from_str(fmt!("http://%s", name)).unwrap() -} - -fn html4_default_style_str() -> &'static str { - include_str!("user-agent.css") -} - - -// FIXME: this shouldn’t be needed. -// The initial value of border-*-width is 'medium' (for which 2px is ok.) -// It’s the *computed values* that is set to 0 when the corresponding -// border-*-style is 'none' (the initial value) or 'hidden'. -// This should be taken care of when removing libcss. -fn servo_default_style_str() -> &'static str { - // libcss want's this to default to 2px.. - "* { border-width: 0px; }" -}
--- a/src/components/main/css/select.rs +++ b/src/components/main/css/select.rs @@ -4,3 +4,2 @@ CON DEL use extra::url::Url; CON use style::Stylesheet; @@ -11,32 +10,5 @@ CON let mut stylist = Stylist::new(); DEL stylist.add_stylesheet(html4_default_style(), UserAgentOrigin); DEL stylist.add_stylesheet(servo_default_style(), UserAgentOrigin); ADD let ua_stylesheet = Stylesheet::from_str(include_str!("user-agent.css")); ADD stylist.add_stylesheet(ua_stylesheet, UserAgentOrigin); CON stylist CON } DEL DEL fn html4_default_style() -> Stylesheet { DEL Stylesheet::from_str(html4_default_style_str()) DEL } DEL DEL fn servo_default_style() -> Stylesheet { DEL Stylesheet::from_str(servo_default_style_str()) DEL } DEL DEL fn default_url(name: &str) -> Url { DEL FromStr::from_str(fmt!("http://%s", name)).unwrap() DEL } DEL DEL fn html4_default_style_str() -> &'static str { DEL include_str!("user-agent.css") DEL } DEL DEL DEL // FIXME: this shouldn’t be needed. DEL // The initial value of border-*-width is 'medium' (for which 2px is ok.) DEL // It’s the *computed values* that is set to 0 when the corresponding DEL // border-*-style is 'none' (the initial value) or 'hidden'. DEL // This should be taken care of when removing libcss. DEL fn servo_default_style_str() -> &'static str { DEL // libcss want's this to default to 2px.. DEL "* { border-width: 0px; }" DEL }
<<<<<<< SEARCH * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use extra::url::Url; use style::Stylesheet; use style::Stylist; ======= * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use style::Stylesheet; use style::Stylist; >>>>>>> REPLACE <<<<<<< SEARCH pub fn new_stylist() -> Stylist { let mut stylist = Stylist::new(); stylist.add_stylesheet(html4_default_style(), UserAgentOrigin); stylist.add_stylesheet(servo_default_style(), UserAgentOrigin); stylist } fn html4_default_style() -> Stylesheet { Stylesheet::from_str(html4_default_style_str()) } fn servo_default_style() -> Stylesheet { Stylesheet::from_str(servo_default_style_str()) } fn default_url(name: &str) -> Url { FromStr::from_str(fmt!("http://%s", name)).unwrap() } fn html4_default_style_str() -> &'static str { include_str!("user-agent.css") } // FIXME: this shouldn’t be needed. // The initial value of border-*-width is 'medium' (for which 2px is ok.) // It’s the *computed values* that is set to 0 when the corresponding // border-*-style is 'none' (the initial value) or 'hidden'. // This should be taken care of when removing libcss. fn servo_default_style_str() -> &'static str { // libcss want's this to default to 2px.. "* { border-width: 0px; }" } ======= pub fn new_stylist() -> Stylist { let mut stylist = Stylist::new(); let ua_stylesheet = Stylesheet::from_str(include_str!("user-agent.css")); stylist.add_stylesheet(ua_stylesheet, UserAgentOrigin); stylist } >>>>>>> REPLACE
vase4kin/TeamCityApp
2a81e9b207fa83fa57228e930e0e618b698e97b9
app/src/main/java/com/github/vase4kin/teamcityapp/api/cache/CacheProviders.java
java
apache-2.0
Increase build types cache life for one day
/* * Copyright 2016 Andrey Tolpeev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.vase4kin.teamcityapp.api.cache; import com.github.vase4kin.teamcityapp.navigation.api.NavigationNode; import com.github.vase4kin.teamcityapp.runbuild.api.Branches; import java.util.concurrent.TimeUnit; import io.rx_cache.DynamicKey; import io.rx_cache.EvictDynamicKey; import io.rx_cache.LifeCache; import rx.Observable; /** * Cache providers */ public interface CacheProviders { // TODO: Increase cache to 24 hours? Good idea, huh? @LifeCache(duration = 1, timeUnit = TimeUnit.HOURS) Observable<NavigationNode> listBuildTypes(Observable<NavigationNode> navigationNodeObservable, DynamicKey dynamicKey, EvictDynamicKey evictDynamicKey); @LifeCache(duration = 1, timeUnit = TimeUnit.MINUTES) Observable<Branches> listBranches(Observable<Branches> branchesObservable, DynamicKey buildTypeId); }
/* * Copyright 2016 Andrey Tolpeev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.vase4kin.teamcityapp.api.cache; import com.github.vase4kin.teamcityapp.navigation.api.NavigationNode; import com.github.vase4kin.teamcityapp.runbuild.api.Branches; import java.util.concurrent.TimeUnit; import io.rx_cache.DynamicKey; import io.rx_cache.EvictDynamicKey; import io.rx_cache.LifeCache; import rx.Observable; /** * Cache providers */ public interface CacheProviders { @LifeCache(duration = 1, timeUnit = TimeUnit.DAYS) Observable<NavigationNode> listBuildTypes(Observable<NavigationNode> navigationNodeObservable, DynamicKey dynamicKey, EvictDynamicKey evictDynamicKey); @LifeCache(duration = 1, timeUnit = TimeUnit.MINUTES) Observable<Branches> listBranches(Observable<Branches> branchesObservable, DynamicKey buildTypeId); }
1
2
1
mixed
--- a/app/src/main/java/com/github/vase4kin/teamcityapp/api/cache/CacheProviders.java +++ b/app/src/main/java/com/github/vase4kin/teamcityapp/api/cache/CacheProviders.java @@ -33,4 +33,3 @@ - // TODO: Increase cache to 24 hours? Good idea, huh? - @LifeCache(duration = 1, timeUnit = TimeUnit.HOURS) + @LifeCache(duration = 1, timeUnit = TimeUnit.DAYS) Observable<NavigationNode> listBuildTypes(Observable<NavigationNode> navigationNodeObservable, DynamicKey dynamicKey, EvictDynamicKey evictDynamicKey);
--- a/app/src/main/java/com/github/vase4kin/teamcityapp/api/cache/CacheProviders.java +++ b/app/src/main/java/com/github/vase4kin/teamcityapp/api/cache/CacheProviders.java @@ ... @@ - // TODO: Increase cache to 24 hours? Good idea, huh? - @LifeCache(duration = 1, timeUnit = TimeUnit.HOURS) + @LifeCache(duration = 1, timeUnit = TimeUnit.DAYS) Observable<NavigationNode> listBuildTypes(Observable<NavigationNode> navigationNodeObservable, DynamicKey dynamicKey, EvictDynamicKey evictDynamicKey);
--- a/app/src/main/java/com/github/vase4kin/teamcityapp/api/cache/CacheProviders.java +++ b/app/src/main/java/com/github/vase4kin/teamcityapp/api/cache/CacheProviders.java @@ -33,4 +33,3 @@ CON DEL // TODO: Increase cache to 24 hours? Good idea, huh? DEL @LifeCache(duration = 1, timeUnit = TimeUnit.HOURS) ADD @LifeCache(duration = 1, timeUnit = TimeUnit.DAYS) CON Observable<NavigationNode> listBuildTypes(Observable<NavigationNode> navigationNodeObservable, DynamicKey dynamicKey, EvictDynamicKey evictDynamicKey);
<<<<<<< SEARCH public interface CacheProviders { // TODO: Increase cache to 24 hours? Good idea, huh? @LifeCache(duration = 1, timeUnit = TimeUnit.HOURS) Observable<NavigationNode> listBuildTypes(Observable<NavigationNode> navigationNodeObservable, DynamicKey dynamicKey, EvictDynamicKey evictDynamicKey); ======= public interface CacheProviders { @LifeCache(duration = 1, timeUnit = TimeUnit.DAYS) Observable<NavigationNode> listBuildTypes(Observable<NavigationNode> navigationNodeObservable, DynamicKey dynamicKey, EvictDynamicKey evictDynamicKey); >>>>>>> REPLACE
knaik94/RewritingCore
af8407ce2744682301e275d6f0e7bb18a2fdd0d0
FizzBuzzTester.java
java
mit
Fix error off by one error related to constructor with stopPoint, added output
public class FizzBuzzTester{ public static void main(String[] args){ FizzBuzzer firstFizz = new FizzBuzzer(20); System.out.println(firstFizz); } } class FizzBuzzer{ public int fizz; public int buzz; public int stopPoint; public FizzBuzzer(){ fizz = 3; buzz = 5; stopPoint = 30; }; public FizzBuzzer(int stop){ this(); stopPoint = stop; } public String toString(){ String temp = ""; for (int i = 1; i < stopPoint; i++){ if (i % 3 == 0) { temp += "fizz"; } if (i % 5 == 0) { temp += "buzz"; } if (i % 3 != 0 && i % 5 != 0){ temp += i; } if( i < stopPoint -1) { temp+= " "; } } return temp; } }
public class FizzBuzzTester{ public static void main(String[] args){ FizzBuzzer firstFizz = new FizzBuzzer(20); System.out.println(firstFizz); //output: 1 2 fizz 4 buzz fizz 7 8 fizz buzz 11 fizz 13 14 fizzbuzz 16 17 fizz 19 buzz } } class FizzBuzzer{ public int fizz; public int buzz; public int stopPoint; public FizzBuzzer(){ fizz = 3; buzz = 5; stopPoint = 30; }; public FizzBuzzer(int stop){ this(); stopPoint = stop + 1; } public String toString(){ String temp = ""; for (int i = 1; i < stopPoint; i++){ if (i % 3 == 0) { temp += "fizz"; } if (i % 5 == 0) { temp += "buzz"; } if (i % 3 != 0 && i % 5 != 0){ temp += i; } if( i < stopPoint -1) { temp+= " "; } } return temp; } }
3
2
2
mixed
--- a/FizzBuzzTester.java +++ b/FizzBuzzTester.java @@ -3,3 +3,4 @@ FizzBuzzer firstFizz = new FizzBuzzer(20); - System.out.println(firstFizz); + System.out.println(firstFizz); + //output: 1 2 fizz 4 buzz fizz 7 8 fizz buzz 11 fizz 13 14 fizzbuzz 16 17 fizz 19 buzz } @@ -20,3 +21,3 @@ this(); - stopPoint = stop; + stopPoint = stop + 1; }
--- a/FizzBuzzTester.java +++ b/FizzBuzzTester.java @@ ... @@ FizzBuzzer firstFizz = new FizzBuzzer(20); - System.out.println(firstFizz); + System.out.println(firstFizz); + //output: 1 2 fizz 4 buzz fizz 7 8 fizz buzz 11 fizz 13 14 fizzbuzz 16 17 fizz 19 buzz } @@ ... @@ this(); - stopPoint = stop; + stopPoint = stop + 1; }
--- a/FizzBuzzTester.java +++ b/FizzBuzzTester.java @@ -3,3 +3,4 @@ CON FizzBuzzer firstFizz = new FizzBuzzer(20); DEL System.out.println(firstFizz); ADD System.out.println(firstFizz); ADD //output: 1 2 fizz 4 buzz fizz 7 8 fizz buzz 11 fizz 13 14 fizzbuzz 16 17 fizz 19 buzz CON } @@ -20,3 +21,3 @@ CON this(); DEL stopPoint = stop; ADD stopPoint = stop + 1; CON }
<<<<<<< SEARCH public static void main(String[] args){ FizzBuzzer firstFizz = new FizzBuzzer(20); System.out.println(firstFizz); } } ======= public static void main(String[] args){ FizzBuzzer firstFizz = new FizzBuzzer(20); System.out.println(firstFizz); //output: 1 2 fizz 4 buzz fizz 7 8 fizz buzz 11 fizz 13 14 fizzbuzz 16 17 fizz 19 buzz } } >>>>>>> REPLACE <<<<<<< SEARCH public FizzBuzzer(int stop){ this(); stopPoint = stop; } ======= public FizzBuzzer(int stop){ this(); stopPoint = stop + 1; } >>>>>>> REPLACE
Deisss/python-sockjsroom
fe5ddba257885aa166bd71696a6eeefad153e66a
setup.py
python
mit
Switch to OSI license for Pypi
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import sockjsroom # Setup library setup( # Pypi name name = "sockjsroom", # Release version version = sockjsroom.__version__, # Associated package packages = find_packages(), # Author author = "Deisss", author_email = "[email protected]", # Package description description = "Sockjs-tornado multi room system", long_description = open('README.md').read(), # Require sockjs-tornado install_requires = ["tornado", "sockjs-tornado"], # Add MANIFEST.in include_package_data = True, # Github url url = "https://github.com/Deisss/python-sockjsroom", # Metadata classifiers=[ "Programming Language :: Python", "Development Status :: 1 - Planning", "License :: MIT Licence", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Topic :: Communications", ], )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import sockjsroom # Setup library setup( # Pypi name name = "sockjsroom", # Release version version = sockjsroom.__version__, # Associated package packages = find_packages(), # Author author = "Deisss", author_email = "[email protected]", # Package description description = "Sockjs-tornado multi room system", long_description = open('README.md').read(), # Require sockjs-tornado install_requires = ["tornado", "sockjs-tornado"], # Add MANIFEST.in include_package_data = True, # Github url url = "https://github.com/Deisss/python-sockjsroom", # Metadata classifiers=[ "Programming Language :: Python", "Development Status :: 1 - Planning", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Topic :: Communications", ], )
1
1
1
mixed
--- a/setup.py +++ b/setup.py @@ -36,3 +36,3 @@ "Development Status :: 1 - Planning", - "License :: MIT Licence", + "License :: OSI Approved :: MIT License", "Natural Language :: English",
--- a/setup.py +++ b/setup.py @@ ... @@ "Development Status :: 1 - Planning", - "License :: MIT Licence", + "License :: OSI Approved :: MIT License", "Natural Language :: English",
--- a/setup.py +++ b/setup.py @@ -36,3 +36,3 @@ CON "Development Status :: 1 - Planning", DEL "License :: MIT Licence", ADD "License :: OSI Approved :: MIT License", CON "Natural Language :: English",
<<<<<<< SEARCH "Programming Language :: Python", "Development Status :: 1 - Planning", "License :: MIT Licence", "Natural Language :: English", "Operating System :: OS Independent", ======= "Programming Language :: Python", "Development Status :: 1 - Planning", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", >>>>>>> REPLACE
geekygirlsarah/adventofcode2016
5c620a504327696b9cfe3ffc423ae7ae6e915e78
dec02/dec02part1.py
python
mit
Add 12/2 part 1 solution
# Advent of Code # Dec 2, Part 1 # @geekygirlsarah
# Advent of Code # Dec 2, Part 1 # @geekygirlsarah inputFile = "input.txt" # Tracking vars finalCode = "" lastNumber = 5 # start here tempNumber = 0 with open(inputFile) as f: while True: line = f.readline(-1) if not line: # print "End of file" break # print ("Line: ", line) print ("First number=" + str(lastNumber)) for dir in line: print("dir=" + dir) if dir == "U": tempNumber = lastNumber - 3 elif dir == "D": tempNumber = lastNumber + 3 elif dir == "L": tempNumber = lastNumber - 1 elif dir == "R": tempNumber = lastNumber + 1 elif dir == "\n": break # Boundary checks to undo out of bounds if dir == "U" and tempNumber < 1: tempNumber = lastNumber elif dir == "D" and tempNumber > 9: tempNumber = lastNumber elif dir == "L" and (tempNumber == 0 or tempNumber == 3 or tempNumber == 6): tempNumber = lastNumber elif dir == "R" and (tempNumber == 10 or tempNumber == 7 or tempNumber == 4): tempNumber = lastNumber print ("New number: " + str(tempNumber)) lastNumber = tempNumber # last number validated, so add to code finalCode = finalCode + str(tempNumber) print("Final code: " + finalCode)
46
0
1
add_only
--- a/dec02/dec02part1.py +++ b/dec02/dec02part1.py @@ -4 +4,47 @@ +inputFile = "input.txt" + +# Tracking vars +finalCode = "" +lastNumber = 5 # start here +tempNumber = 0 + +with open(inputFile) as f: + while True: + line = f.readline(-1) + if not line: + # print "End of file" + break + # print ("Line: ", line) + + print ("First number=" + str(lastNumber)) + for dir in line: + print("dir=" + dir) + if dir == "U": + tempNumber = lastNumber - 3 + elif dir == "D": + tempNumber = lastNumber + 3 + elif dir == "L": + tempNumber = lastNumber - 1 + elif dir == "R": + tempNumber = lastNumber + 1 + elif dir == "\n": + break + + # Boundary checks to undo out of bounds + if dir == "U" and tempNumber < 1: + tempNumber = lastNumber + elif dir == "D" and tempNumber > 9: + tempNumber = lastNumber + elif dir == "L" and (tempNumber == 0 or tempNumber == 3 or tempNumber == 6): + tempNumber = lastNumber + elif dir == "R" and (tempNumber == 10 or tempNumber == 7 or tempNumber == 4): + tempNumber = lastNumber + + print ("New number: " + str(tempNumber)) + lastNumber = tempNumber + + # last number validated, so add to code + finalCode = finalCode + str(tempNumber) + +print("Final code: " + finalCode)
--- a/dec02/dec02part1.py +++ b/dec02/dec02part1.py @@ ... @@ +inputFile = "input.txt" + +# Tracking vars +finalCode = "" +lastNumber = 5 # start here +tempNumber = 0 + +with open(inputFile) as f: + while True: + line = f.readline(-1) + if not line: + # print "End of file" + break + # print ("Line: ", line) + + print ("First number=" + str(lastNumber)) + for dir in line: + print("dir=" + dir) + if dir == "U": + tempNumber = lastNumber - 3 + elif dir == "D": + tempNumber = lastNumber + 3 + elif dir == "L": + tempNumber = lastNumber - 1 + elif dir == "R": + tempNumber = lastNumber + 1 + elif dir == "\n": + break + + # Boundary checks to undo out of bounds + if dir == "U" and tempNumber < 1: + tempNumber = lastNumber + elif dir == "D" and tempNumber > 9: + tempNumber = lastNumber + elif dir == "L" and (tempNumber == 0 or tempNumber == 3 or tempNumber == 6): + tempNumber = lastNumber + elif dir == "R" and (tempNumber == 10 or tempNumber == 7 or tempNumber == 4): + tempNumber = lastNumber + + print ("New number: " + str(tempNumber)) + lastNumber = tempNumber + + # last number validated, so add to code + finalCode = finalCode + str(tempNumber) + +print("Final code: " + finalCode)
--- a/dec02/dec02part1.py +++ b/dec02/dec02part1.py @@ -4 +4,47 @@ CON ADD inputFile = "input.txt" ADD ADD # Tracking vars ADD finalCode = "" ADD lastNumber = 5 # start here ADD tempNumber = 0 ADD ADD with open(inputFile) as f: ADD while True: ADD line = f.readline(-1) ADD if not line: ADD # print "End of file" ADD break ADD # print ("Line: ", line) ADD ADD print ("First number=" + str(lastNumber)) ADD for dir in line: ADD print("dir=" + dir) ADD if dir == "U": ADD tempNumber = lastNumber - 3 ADD elif dir == "D": ADD tempNumber = lastNumber + 3 ADD elif dir == "L": ADD tempNumber = lastNumber - 1 ADD elif dir == "R": ADD tempNumber = lastNumber + 1 ADD elif dir == "\n": ADD break ADD ADD # Boundary checks to undo out of bounds ADD if dir == "U" and tempNumber < 1: ADD tempNumber = lastNumber ADD elif dir == "D" and tempNumber > 9: ADD tempNumber = lastNumber ADD elif dir == "L" and (tempNumber == 0 or tempNumber == 3 or tempNumber == 6): ADD tempNumber = lastNumber ADD elif dir == "R" and (tempNumber == 10 or tempNumber == 7 or tempNumber == 4): ADD tempNumber = lastNumber ADD ADD print ("New number: " + str(tempNumber)) ADD lastNumber = tempNumber ADD ADD # last number validated, so add to code ADD finalCode = finalCode + str(tempNumber) ADD ADD print("Final code: " + finalCode)
<<<<<<< SEARCH # @geekygirlsarah ======= # @geekygirlsarah inputFile = "input.txt" # Tracking vars finalCode = "" lastNumber = 5 # start here tempNumber = 0 with open(inputFile) as f: while True: line = f.readline(-1) if not line: # print "End of file" break # print ("Line: ", line) print ("First number=" + str(lastNumber)) for dir in line: print("dir=" + dir) if dir == "U": tempNumber = lastNumber - 3 elif dir == "D": tempNumber = lastNumber + 3 elif dir == "L": tempNumber = lastNumber - 1 elif dir == "R": tempNumber = lastNumber + 1 elif dir == "\n": break # Boundary checks to undo out of bounds if dir == "U" and tempNumber < 1: tempNumber = lastNumber elif dir == "D" and tempNumber > 9: tempNumber = lastNumber elif dir == "L" and (tempNumber == 0 or tempNumber == 3 or tempNumber == 6): tempNumber = lastNumber elif dir == "R" and (tempNumber == 10 or tempNumber == 7 or tempNumber == 4): tempNumber = lastNumber print ("New number: " + str(tempNumber)) lastNumber = tempNumber # last number validated, so add to code finalCode = finalCode + str(tempNumber) print("Final code: " + finalCode) >>>>>>> REPLACE
cketti/MateLightAndroid
c86c1f98e39139970f83ebd9f97ef49e3452ad99
matelight/src/main/java/de/cketti/matelight/MateLight.java
java
apache-2.0
Change host name to ml.jaseg.net
package de.cketti.matelight; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; public class MateLight { private static final String HOST = "matelight.cbrp3.c-base.org"; private static final int PORT = 1337; private static final String SUCCESS = "KTHXBYE!"; private static final int TIMEOUT = 60 * 1000; private Socket mSocket; public void sendMessage(String message) throws IOException { mSocket = new Socket(); mSocket.connect(new InetSocketAddress(HOST, PORT)); try { OutputStream out = mSocket.getOutputStream(); try { mSocket.setSoTimeout(TIMEOUT); out.write(message.getBytes()); out.write('\n'); out.flush(); InputStream in = mSocket.getInputStream(); try { BufferedReader buf = new BufferedReader(new InputStreamReader(in)); String response = buf.readLine(); if (!SUCCESS.equals(response)) { throw new RuntimeException("No success message from server"); } } finally { in.close(); } } finally { out.close(); } } finally { mSocket.close(); } } public void cancel() { try { mSocket.close(); } catch (Exception e) { // ignore } } }
package de.cketti.matelight; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; public class MateLight { private static final String HOST = "ml.jaseg.net"; private static final int PORT = 1337; private static final String SUCCESS = "KTHXBYE!"; private static final int TIMEOUT = 60 * 1000; private Socket mSocket; public void sendMessage(String message) throws IOException { mSocket = new Socket(); mSocket.connect(new InetSocketAddress(HOST, PORT)); try { OutputStream out = mSocket.getOutputStream(); try { mSocket.setSoTimeout(TIMEOUT); out.write(message.getBytes()); out.write('\n'); out.flush(); InputStream in = mSocket.getInputStream(); try { BufferedReader buf = new BufferedReader(new InputStreamReader(in)); String response = buf.readLine(); if (!SUCCESS.equals(response)) { throw new RuntimeException("No success message from server"); } } finally { in.close(); } } finally { out.close(); } } finally { mSocket.close(); } } public void cancel() { try { mSocket.close(); } catch (Exception e) { // ignore } } }
1
1
1
mixed
--- a/matelight/src/main/java/de/cketti/matelight/MateLight.java +++ b/matelight/src/main/java/de/cketti/matelight/MateLight.java @@ -11,3 +11,3 @@ public class MateLight { - private static final String HOST = "matelight.cbrp3.c-base.org"; + private static final String HOST = "ml.jaseg.net"; private static final int PORT = 1337;
--- a/matelight/src/main/java/de/cketti/matelight/MateLight.java +++ b/matelight/src/main/java/de/cketti/matelight/MateLight.java @@ ... @@ public class MateLight { - private static final String HOST = "matelight.cbrp3.c-base.org"; + private static final String HOST = "ml.jaseg.net"; private static final int PORT = 1337;
--- a/matelight/src/main/java/de/cketti/matelight/MateLight.java +++ b/matelight/src/main/java/de/cketti/matelight/MateLight.java @@ -11,3 +11,3 @@ CON public class MateLight { DEL private static final String HOST = "matelight.cbrp3.c-base.org"; ADD private static final String HOST = "ml.jaseg.net"; CON private static final int PORT = 1337;
<<<<<<< SEARCH public class MateLight { private static final String HOST = "matelight.cbrp3.c-base.org"; private static final int PORT = 1337; private static final String SUCCESS = "KTHXBYE!"; ======= public class MateLight { private static final String HOST = "ml.jaseg.net"; private static final int PORT = 1337; private static final String SUCCESS = "KTHXBYE!"; >>>>>>> REPLACE
Omenia/robotframework-whitelibrary
00e4663940ed1d22e768b3de3d1c645c8649aecc
src/WhiteLibrary/keywords/items/textbox.py
python
apache-2.0
Change to a better argument name
from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text
from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input_value): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input_value @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text
3
3
2
mixed
--- a/src/WhiteLibrary/keywords/items/textbox.py +++ b/src/WhiteLibrary/keywords/items/textbox.py @@ -7,3 +7,3 @@ @keyword - def input_text_to_textbox(self, locator, input): + def input_text_to_textbox(self, locator, input_value): """ @@ -13,6 +13,6 @@ - ``input`` is the text to write. + ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) - textBox.Text = input + textBox.Text = input_value
--- a/src/WhiteLibrary/keywords/items/textbox.py +++ b/src/WhiteLibrary/keywords/items/textbox.py @@ ... @@ @keyword - def input_text_to_textbox(self, locator, input): + def input_text_to_textbox(self, locator, input_value): """ @@ ... @@ - ``input`` is the text to write. + ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) - textBox.Text = input + textBox.Text = input_value
--- a/src/WhiteLibrary/keywords/items/textbox.py +++ b/src/WhiteLibrary/keywords/items/textbox.py @@ -7,3 +7,3 @@ CON @keyword DEL def input_text_to_textbox(self, locator, input): ADD def input_text_to_textbox(self, locator, input_value): CON """ @@ -13,6 +13,6 @@ CON DEL ``input`` is the text to write. ADD ``input_value`` is the text to write. CON """ CON textBox = self.state._get_typed_item_by_locator(TextBox, locator) DEL textBox.Text = input ADD textBox.Text = input_value CON
<<<<<<< SEARCH class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input @keyword ======= class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input_value): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input_value @keyword >>>>>>> REPLACE
facundovictor/non-blocking-socket-samples
9a698d1428fbe0744c9dba3532b778569dbe1dd4
server.py
python
mit
Add docstrings and author reference
import socket import sys class SimpleServer(object): """Simple server using the socket library""" def __init__(self, blocking=False, connection_oriented=True): """ The constructor initializes socket specifying the blocking status and if it must be a connection oriented socket. :param blocking: A flag that specifies if the socket must be blocking :ptype: Boolean :param connection_oriented: A flag that specifies if the socket must be connection oriented or not :ptype: Boolean """ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not blocking: self.sock.setblocking(0) def connect(self, host, port): """ Connects the server to the "host", and prepares it to listen on "port" :param host: The network layer identifier of an interface :ptype: String or Integer (see help(socket)) :param port: The transport layer identifier of an application :ptype: Integer """ self.sock.connect((host, port))
""" A Simple Server class that allows to configure a socket in a very simple way. It is for studying purposes only. """ import socket import sys __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" class SimpleServer(object): """Simple server using the socket library""" def __init__(self, blocking=False, connection_oriented=True): """ The constructor initializes socket specifying the blocking status and if it must be a connection oriented socket. :param blocking: A flag that specifies if the socket must be blocking :ptype: Boolean :param connection_oriented: A flag that specifies if the socket must be connection oriented or not :ptype: Boolean """ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not blocking: self.sock.setblocking(0) def connect(self, host, port): """ Connects the server to the "host", and prepares it to listen on "port" :param host: The network layer identifier of an interface :ptype: String or Integer (see help(socket)) :param port: The transport layer identifier of an application :ptype: Integer """ server_address = (host, port) self.sock.connect(server_address) print('starting up on %s port %s' % server_address)
13
1
2
mixed
--- a/server.py +++ b/server.py @@ -1,3 +1,13 @@ +""" +A Simple Server class that allows to configure a socket in a very simple way. +It is for studying purposes only. +""" + import socket import sys + + +__author__ = "Facundo Victor" +__license__ = "MIT" +__email__ = "[email protected]" @@ -33,2 +43,4 @@ """ - self.sock.connect((host, port)) + server_address = (host, port) + self.sock.connect(server_address) + print('starting up on %s port %s' % server_address)
--- a/server.py +++ b/server.py @@ ... @@ +""" +A Simple Server class that allows to configure a socket in a very simple way. +It is for studying purposes only. +""" + import socket import sys + + +__author__ = "Facundo Victor" +__license__ = "MIT" +__email__ = "[email protected]" @@ ... @@ """ - self.sock.connect((host, port)) + server_address = (host, port) + self.sock.connect(server_address) + print('starting up on %s port %s' % server_address)
--- a/server.py +++ b/server.py @@ -1,3 +1,13 @@ ADD """ ADD A Simple Server class that allows to configure a socket in a very simple way. ADD It is for studying purposes only. ADD """ ADD CON import socket CON import sys ADD ADD ADD __author__ = "Facundo Victor" ADD __license__ = "MIT" ADD __email__ = "[email protected]" CON @@ -33,2 +43,4 @@ CON """ DEL self.sock.connect((host, port)) ADD server_address = (host, port) ADD self.sock.connect(server_address) ADD print('starting up on %s port %s' % server_address)
<<<<<<< SEARCH import socket import sys ======= """ A Simple Server class that allows to configure a socket in a very simple way. It is for studying purposes only. """ import socket import sys __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" >>>>>>> REPLACE <<<<<<< SEARCH :ptype: Integer """ self.sock.connect((host, port)) ======= :ptype: Integer """ server_address = (host, port) self.sock.connect(server_address) print('starting up on %s port %s' % server_address) >>>>>>> REPLACE
nolimits4web/Swiper
b26650e10cef116b87f6ee37f2797d39a4292387
src/components/core/update/updateSize.js
javascript
mit
Fix NaN sizes in SSR
import { extend } from '../../../utils/utils'; export default function updateSize() { const swiper = this; let width; let height; const $el = swiper.$el; if (typeof swiper.params.width !== 'undefined') { width = swiper.params.width; } else { width = $el[0].clientWidth; } if (typeof swiper.params.height !== 'undefined') { height = swiper.params.height; } else { height = $el[0].clientHeight; } if ((width === 0 && swiper.isHorizontal()) || (height === 0 && swiper.isVertical())) { return; } // Subtract paddings width = width - parseInt($el.css('padding-left') || 0, 10) - parseInt($el.css('padding-right') || 0, 10); height = height - parseInt($el.css('padding-top') || 0, 10) - parseInt($el.css('padding-bottom') || 0, 10); extend(swiper, { width, height, size: swiper.isHorizontal() ? width : height, }); }
import { extend } from '../../../utils/utils'; export default function updateSize() { const swiper = this; let width; let height; const $el = swiper.$el; if (typeof swiper.params.width !== 'undefined' && swiper.params.width !== null) { width = swiper.params.width; } else { width = $el[0].clientWidth; } if (typeof swiper.params.height !== 'undefined' && swiper.params.width !== null) { height = swiper.params.height; } else { height = $el[0].clientHeight; } if ((width === 0 && swiper.isHorizontal()) || (height === 0 && swiper.isVertical())) { return; } // Subtract paddings width = width - parseInt($el.css('padding-left') || 0, 10) - parseInt($el.css('padding-right') || 0, 10); height = height - parseInt($el.css('padding-top') || 0, 10) - parseInt($el.css('padding-bottom') || 0, 10); if (Number.isNaN(width)) width = 0; if (Number.isNaN(height)) height = 0; extend(swiper, { width, height, size: swiper.isHorizontal() ? width : height, }); }
5
2
3
mixed
--- a/src/components/core/update/updateSize.js +++ b/src/components/core/update/updateSize.js @@ -7,3 +7,3 @@ const $el = swiper.$el; - if (typeof swiper.params.width !== 'undefined') { + if (typeof swiper.params.width !== 'undefined' && swiper.params.width !== null) { width = swiper.params.width; @@ -12,3 +12,3 @@ } - if (typeof swiper.params.height !== 'undefined') { + if (typeof swiper.params.height !== 'undefined' && swiper.params.width !== null) { height = swiper.params.height; @@ -31,2 +31,5 @@ + if (Number.isNaN(width)) width = 0; + if (Number.isNaN(height)) height = 0; + extend(swiper, {
--- a/src/components/core/update/updateSize.js +++ b/src/components/core/update/updateSize.js @@ ... @@ const $el = swiper.$el; - if (typeof swiper.params.width !== 'undefined') { + if (typeof swiper.params.width !== 'undefined' && swiper.params.width !== null) { width = swiper.params.width; @@ ... @@ } - if (typeof swiper.params.height !== 'undefined') { + if (typeof swiper.params.height !== 'undefined' && swiper.params.width !== null) { height = swiper.params.height; @@ ... @@ + if (Number.isNaN(width)) width = 0; + if (Number.isNaN(height)) height = 0; + extend(swiper, {
--- a/src/components/core/update/updateSize.js +++ b/src/components/core/update/updateSize.js @@ -7,3 +7,3 @@ CON const $el = swiper.$el; DEL if (typeof swiper.params.width !== 'undefined') { ADD if (typeof swiper.params.width !== 'undefined' && swiper.params.width !== null) { CON width = swiper.params.width; @@ -12,3 +12,3 @@ CON } DEL if (typeof swiper.params.height !== 'undefined') { ADD if (typeof swiper.params.height !== 'undefined' && swiper.params.width !== null) { CON height = swiper.params.height; @@ -31,2 +31,5 @@ CON ADD if (Number.isNaN(width)) width = 0; ADD if (Number.isNaN(height)) height = 0; ADD CON extend(swiper, {
<<<<<<< SEARCH let height; const $el = swiper.$el; if (typeof swiper.params.width !== 'undefined') { width = swiper.params.width; } else { width = $el[0].clientWidth; } if (typeof swiper.params.height !== 'undefined') { height = swiper.params.height; } else { ======= let height; const $el = swiper.$el; if (typeof swiper.params.width !== 'undefined' && swiper.params.width !== null) { width = swiper.params.width; } else { width = $el[0].clientWidth; } if (typeof swiper.params.height !== 'undefined' && swiper.params.width !== null) { height = swiper.params.height; } else { >>>>>>> REPLACE <<<<<<< SEARCH parseInt($el.css('padding-bottom') || 0, 10); extend(swiper, { width, ======= parseInt($el.css('padding-bottom') || 0, 10); if (Number.isNaN(width)) width = 0; if (Number.isNaN(height)) height = 0; extend(swiper, { width, >>>>>>> REPLACE
dtolnay/syn
c144538e0541f49f4378662cc56ece161126dec3
tests/test_expr.rs
rust
apache-2.0
Add snapshot test for group with trailer expr
#[macro_use] mod macros; use std::str::FromStr; use proc_macro2::{Delimiter, Group, TokenStream, TokenTree}; use quote::quote; use std::iter::FromIterator; use syn::{Expr, ExprRange}; #[test] fn test_expr_parse() { let code = "..100u32"; let tt = TokenStream::from_str(code).unwrap(); let expr: Expr = syn::parse2(tt.clone()).unwrap(); let expr_range: ExprRange = syn::parse2(tt).unwrap(); assert_eq!(expr, Expr::Range(expr_range)); } #[test] fn test_await() { // Must not parse as Expr::Field. let expr = syn::parse_str::<Expr>("fut.await").unwrap(); snapshot!(expr, @r###" Expr::Await { base: Expr::Path { path: Path { segments: [ PathSegment { ident: "fut", arguments: None, }, ], }, }, } "###); } #[test] fn test_macro_variable_func() { // mimics the token stream corresponding to `$fn()` let tokens = TokenStream::from_iter(vec![ TokenTree::Group(Group::new(Delimiter::None, quote! { f })), TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), ]); syn::parse2::<Expr>(tokens).unwrap(); }
#[macro_use] mod macros; use std::str::FromStr; use proc_macro2::{Delimiter, Group, TokenStream, TokenTree}; use quote::quote; use std::iter::FromIterator; use syn::{Expr, ExprRange}; #[test] fn test_expr_parse() { let code = "..100u32"; let tt = TokenStream::from_str(code).unwrap(); let expr: Expr = syn::parse2(tt.clone()).unwrap(); let expr_range: ExprRange = syn::parse2(tt).unwrap(); assert_eq!(expr, Expr::Range(expr_range)); } #[test] fn test_await() { // Must not parse as Expr::Field. let expr = syn::parse_str::<Expr>("fut.await").unwrap(); snapshot!(expr, @r###" Expr::Await { base: Expr::Path { path: Path { segments: [ PathSegment { ident: "fut", arguments: None, }, ], }, }, } "###); } #[test] fn test_macro_variable_func() { // mimics the token stream corresponding to `$fn()` let tokens = TokenStream::from_iter(vec![ TokenTree::Group(Group::new(Delimiter::None, quote! { f })), TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), ]); snapshot!(tokens as Expr, @r###" Expr::Call { func: Expr::Group { expr: Expr::Path { path: Path { segments: [ PathSegment { ident: "f", arguments: None, }, ], }, }, }, } "###); }
16
1
1
mixed
--- a/tests/test_expr.rs +++ b/tests/test_expr.rs @@ -48,3 +48,18 @@ - syn::parse2::<Expr>(tokens).unwrap(); + snapshot!(tokens as Expr, @r###" + Expr::Call { + func: Expr::Group { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "f", + arguments: None, + }, + ], + }, + }, + }, + } + "###); }
--- a/tests/test_expr.rs +++ b/tests/test_expr.rs @@ ... @@ - syn::parse2::<Expr>(tokens).unwrap(); + snapshot!(tokens as Expr, @r###" + Expr::Call { + func: Expr::Group { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "f", + arguments: None, + }, + ], + }, + }, + }, + } + "###); }
--- a/tests/test_expr.rs +++ b/tests/test_expr.rs @@ -48,3 +48,18 @@ CON DEL syn::parse2::<Expr>(tokens).unwrap(); ADD snapshot!(tokens as Expr, @r###" ADD Expr::Call { ADD func: Expr::Group { ADD expr: Expr::Path { ADD path: Path { ADD segments: [ ADD PathSegment { ADD ident: "f", ADD arguments: None, ADD }, ADD ], ADD }, ADD }, ADD }, ADD } ADD "###); CON }
<<<<<<< SEARCH ]); syn::parse2::<Expr>(tokens).unwrap(); } ======= ]); snapshot!(tokens as Expr, @r###" Expr::Call { func: Expr::Group { expr: Expr::Path { path: Path { segments: [ PathSegment { ident: "f", arguments: None, }, ], }, }, }, } "###); } >>>>>>> REPLACE
sgrif/diesel
0e12f13e7cc5dc56fa30a55bc5380ee5f025b61d
src/result.rs
rust
apache-2.0
Implement `Error` for our errors. What a concept! (Specifically, looking at the usage crates.io would need made it clear that I had forgotten to do this)
use std::result; use std::convert::From; use std::ffi::NulError; #[derive(Debug, PartialEq)] pub enum Error { InvalidCString(NulError), DatabaseError(String), } #[derive(Debug)] pub enum ConnectionError { InvalidCString(NulError), BadConnection(String), } pub type Result<T> = result::Result<T, Error>; pub type ConnectionResult<T> = result::Result<T, ConnectionError>; impl From<NulError> for ConnectionError { fn from(e: NulError) -> Self { ConnectionError::InvalidCString(e) } } impl From<NulError> for Error { fn from(e: NulError) -> Self { Error::InvalidCString(e) } }
use std::result; use std::convert::From; use std::error::Error as StdError; use std::fmt::{self, Display, Write}; use std::ffi::NulError; #[derive(Debug, PartialEq)] pub enum Error { InvalidCString(NulError), DatabaseError(String), } #[derive(Debug)] pub enum ConnectionError { InvalidCString(NulError), BadConnection(String), } pub type Result<T> = result::Result<T, Error>; pub type ConnectionResult<T> = result::Result<T, ConnectionError>; impl From<NulError> for ConnectionError { fn from(e: NulError) -> Self { ConnectionError::InvalidCString(e) } } impl From<NulError> for Error { fn from(e: NulError) -> Self { Error::InvalidCString(e) } } impl Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Error::InvalidCString(ref nul_err) => nul_err.fmt(f), &Error::DatabaseError(ref s) => write!(f, "{}", &s), } } } impl StdError for Error { fn description(&self) -> &str { match self { &Error::InvalidCString(ref nul_err) => nul_err.description(), &Error::DatabaseError(ref s) => &s, } } } impl Display for ConnectionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &ConnectionError::InvalidCString(ref nul_err) => nul_err.fmt(f), &ConnectionError::BadConnection(ref s) => write!(f, "{}", &s), } } } impl StdError for ConnectionError { fn description(&self) -> &str { match self { &ConnectionError::InvalidCString(ref nul_err) => nul_err.description(), &ConnectionError::BadConnection(ref s) => &s, } } }
38
0
2
add_only
--- a/src/result.rs +++ b/src/result.rs @@ -2,2 +2,4 @@ use std::convert::From; +use std::error::Error as StdError; +use std::fmt::{self, Display, Write}; use std::ffi::NulError; @@ -30 +32,37 @@ } + +impl Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + &Error::InvalidCString(ref nul_err) => nul_err.fmt(f), + &Error::DatabaseError(ref s) => write!(f, "{}", &s), + } + } +} + +impl StdError for Error { + fn description(&self) -> &str { + match self { + &Error::InvalidCString(ref nul_err) => nul_err.description(), + &Error::DatabaseError(ref s) => &s, + } + } +} + +impl Display for ConnectionError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + &ConnectionError::InvalidCString(ref nul_err) => nul_err.fmt(f), + &ConnectionError::BadConnection(ref s) => write!(f, "{}", &s), + } + } +} + +impl StdError for ConnectionError { + fn description(&self) -> &str { + match self { + &ConnectionError::InvalidCString(ref nul_err) => nul_err.description(), + &ConnectionError::BadConnection(ref s) => &s, + } + } +}
--- a/src/result.rs +++ b/src/result.rs @@ ... @@ use std::convert::From; +use std::error::Error as StdError; +use std::fmt::{self, Display, Write}; use std::ffi::NulError; @@ ... @@ } + +impl Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + &Error::InvalidCString(ref nul_err) => nul_err.fmt(f), + &Error::DatabaseError(ref s) => write!(f, "{}", &s), + } + } +} + +impl StdError for Error { + fn description(&self) -> &str { + match self { + &Error::InvalidCString(ref nul_err) => nul_err.description(), + &Error::DatabaseError(ref s) => &s, + } + } +} + +impl Display for ConnectionError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + &ConnectionError::InvalidCString(ref nul_err) => nul_err.fmt(f), + &ConnectionError::BadConnection(ref s) => write!(f, "{}", &s), + } + } +} + +impl StdError for ConnectionError { + fn description(&self) -> &str { + match self { + &ConnectionError::InvalidCString(ref nul_err) => nul_err.description(), + &ConnectionError::BadConnection(ref s) => &s, + } + } +}
--- a/src/result.rs +++ b/src/result.rs @@ -2,2 +2,4 @@ CON use std::convert::From; ADD use std::error::Error as StdError; ADD use std::fmt::{self, Display, Write}; CON use std::ffi::NulError; @@ -30 +32,37 @@ CON } ADD ADD impl Display for Error { ADD fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ADD match self { ADD &Error::InvalidCString(ref nul_err) => nul_err.fmt(f), ADD &Error::DatabaseError(ref s) => write!(f, "{}", &s), ADD } ADD } ADD } ADD ADD impl StdError for Error { ADD fn description(&self) -> &str { ADD match self { ADD &Error::InvalidCString(ref nul_err) => nul_err.description(), ADD &Error::DatabaseError(ref s) => &s, ADD } ADD } ADD } ADD ADD impl Display for ConnectionError { ADD fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ADD match self { ADD &ConnectionError::InvalidCString(ref nul_err) => nul_err.fmt(f), ADD &ConnectionError::BadConnection(ref s) => write!(f, "{}", &s), ADD } ADD } ADD } ADD ADD impl StdError for ConnectionError { ADD fn description(&self) -> &str { ADD match self { ADD &ConnectionError::InvalidCString(ref nul_err) => nul_err.description(), ADD &ConnectionError::BadConnection(ref s) => &s, ADD } ADD } ADD }
<<<<<<< SEARCH use std::result; use std::convert::From; use std::ffi::NulError; ======= use std::result; use std::convert::From; use std::error::Error as StdError; use std::fmt::{self, Display, Write}; use std::ffi::NulError; >>>>>>> REPLACE <<<<<<< SEARCH } } ======= } } impl Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Error::InvalidCString(ref nul_err) => nul_err.fmt(f), &Error::DatabaseError(ref s) => write!(f, "{}", &s), } } } impl StdError for Error { fn description(&self) -> &str { match self { &Error::InvalidCString(ref nul_err) => nul_err.description(), &Error::DatabaseError(ref s) => &s, } } } impl Display for ConnectionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &ConnectionError::InvalidCString(ref nul_err) => nul_err.fmt(f), &ConnectionError::BadConnection(ref s) => write!(f, "{}", &s), } } } impl StdError for ConnectionError { fn description(&self) -> &str { match self { &ConnectionError::InvalidCString(ref nul_err) => nul_err.description(), &ConnectionError::BadConnection(ref s) => &s, } } } >>>>>>> REPLACE
KevinFrans3/piston
43b133f292b9500df1962fb649ea24fdef4e02ad
src/asset_store.rs
rust
mit
Make `AssetStore` return Err when empty. Closes https://github.com/PistonDevelopers/piston/issues/432
//! Storing sounds, textures, animations etc. // Extern crates. use std::os::self_exe_path; /// A place to store sounds, textures, animations etc. /// /// The idea is to have one object which the app can use /// to load assets for the game with a simple interface. pub struct AssetStore { // The folder to load assets from. assets_folder: Option<String>, } impl AssetStore { /// Creates a new `AssetStore` from an assets folder. pub fn from_folder(assets_folder: &str) -> AssetStore { AssetStore { assets_folder: Some(assets_folder.to_string()), } } /// Creates an empty `AssetStore` with no assets. pub fn empty() -> AssetStore { AssetStore { assets_folder: None, } } /// Returns the path of an asset file. pub fn path(&self, file: &str) -> Result<Path, String> { let folder = self.assets_folder.as_ref().unwrap(); let exe_path = self_exe_path(); let exe_path = match exe_path { Some(path) => path, None => return Err("Could not get the path to executable".to_string()), }; Ok(exe_path.join(Path::new(folder.as_slice())).join(Path::new(file))) } }
//! Storing sounds, textures, animations etc. // Extern crates. use std::os::self_exe_path; /// A place to store sounds, textures, animations etc. /// /// The idea is to have one object which the app can use /// to load assets for the game with a simple interface. pub struct AssetStore { // The folder to load assets from. assets_folder: Option<String>, } impl AssetStore { /// Creates a new `AssetStore` from an assets folder. pub fn from_folder(assets_folder: &str) -> AssetStore { AssetStore { assets_folder: Some(assets_folder.to_string()), } } /// Creates an empty `AssetStore` with no assets. pub fn empty() -> AssetStore { AssetStore { assets_folder: None, } } /// Returns the path of an asset file. pub fn path(&self, file: &str) -> Result<Path, String> { let folder = match self.assets_folder.as_ref() { Some(folder) => folder, None => return Err( "The assets folder is not set".to_string() ) }; let exe_path = self_exe_path(); let exe_path = match exe_path { Some(path) => path, None => return Err( "Could not get the path to executable".to_string() ), }; Ok(exe_path.join(Path::new(folder.as_slice())).join(Path::new(file))) } }
9
2
2
mixed
--- a/src/asset_store.rs +++ b/src/asset_store.rs @@ -32,3 +32,8 @@ pub fn path(&self, file: &str) -> Result<Path, String> { - let folder = self.assets_folder.as_ref().unwrap(); + let folder = match self.assets_folder.as_ref() { + Some(folder) => folder, + None => return Err( + "The assets folder is not set".to_string() + ) + }; let exe_path = self_exe_path(); @@ -36,3 +41,5 @@ Some(path) => path, - None => return Err("Could not get the path to executable".to_string()), + None => return Err( + "Could not get the path to executable".to_string() + ), };
--- a/src/asset_store.rs +++ b/src/asset_store.rs @@ ... @@ pub fn path(&self, file: &str) -> Result<Path, String> { - let folder = self.assets_folder.as_ref().unwrap(); + let folder = match self.assets_folder.as_ref() { + Some(folder) => folder, + None => return Err( + "The assets folder is not set".to_string() + ) + }; let exe_path = self_exe_path(); @@ ... @@ Some(path) => path, - None => return Err("Could not get the path to executable".to_string()), + None => return Err( + "Could not get the path to executable".to_string() + ), };
--- a/src/asset_store.rs +++ b/src/asset_store.rs @@ -32,3 +32,8 @@ CON pub fn path(&self, file: &str) -> Result<Path, String> { DEL let folder = self.assets_folder.as_ref().unwrap(); ADD let folder = match self.assets_folder.as_ref() { ADD Some(folder) => folder, ADD None => return Err( ADD "The assets folder is not set".to_string() ADD ) ADD }; CON let exe_path = self_exe_path(); @@ -36,3 +41,5 @@ CON Some(path) => path, DEL None => return Err("Could not get the path to executable".to_string()), ADD None => return Err( ADD "Could not get the path to executable".to_string() ADD ), CON };
<<<<<<< SEARCH /// Returns the path of an asset file. pub fn path(&self, file: &str) -> Result<Path, String> { let folder = self.assets_folder.as_ref().unwrap(); let exe_path = self_exe_path(); let exe_path = match exe_path { Some(path) => path, None => return Err("Could not get the path to executable".to_string()), }; Ok(exe_path.join(Path::new(folder.as_slice())).join(Path::new(file))) ======= /// Returns the path of an asset file. pub fn path(&self, file: &str) -> Result<Path, String> { let folder = match self.assets_folder.as_ref() { Some(folder) => folder, None => return Err( "The assets folder is not set".to_string() ) }; let exe_path = self_exe_path(); let exe_path = match exe_path { Some(path) => path, None => return Err( "Could not get the path to executable".to_string() ), }; Ok(exe_path.join(Path::new(folder.as_slice())).join(Path::new(file))) >>>>>>> REPLACE
agrav/freesif
c464887817334cd1dbc3c4587f185ec7ea598fda
setup.py
python
mit
Enable versioning using Git tags, rendering of README in markdown syntax, and limit dependencies by version.
# -*- coding: utf-8 -*- from setuptools import setup, find_packages from codecs import open # To use a consistent encoding from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the relevant file with open(path.join(here, 'README.txt'), encoding='utf-8') as f: long_description = f.read() setup( name='freesif', version='0.1', description='Get data from Sesam Interface Files', long_description=long_description, # url='https://github.com/agrav/freesif', author='Audun Gravdal Johansen', author_email='[email protected]', license='MIT', classifiers=[ # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: MIT License', # 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ], keywords='sesam structural hydrodynamic', packages=find_packages(exclude=['contrib', 'docs', 'tests*']), install_requires=['tables', 'numpy'], )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages import os # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( # package data name='freesif', description='Get data from Sesam Interface Files', use_scm_version=True, packages=find_packages(exclude=['contrib', 'docs', 'tests*']), package_data=dict(), python_requires='~=3.7', setup_requires=['setuptools_scm'], install_requires=[ 'tables>=3.6,<4', 'numpy>=1.17,<2' ], zip_safe=True, # meta data long_description=read('README.md'), keywords='sesam structural hydrodynamic', url='https://github.com/agrav/freesif', author='Audun Gravdal Johansen', author_email='[email protected]', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.4', ], )
25
17
2
mixed
--- a/setup.py +++ b/setup.py @@ -2,17 +2,32 @@ from setuptools import setup, find_packages -from codecs import open # To use a consistent encoding -from os import path +import os -here = path.abspath(path.dirname(__file__)) -# Get the long description from the relevant file -with open(path.join(here, 'README.txt'), encoding='utf-8') as f: - long_description = f.read() +# Utility function to read the README file. +# Used for the long_description. It's nice, because now 1) we have a top level +# README file and 2) it's easier to type in the README file than to put a raw +# string in below ... +def read(fname): + return open(os.path.join(os.path.dirname(__file__), fname)).read() + setup( + # package data name='freesif', - version='0.1', description='Get data from Sesam Interface Files', - long_description=long_description, -# url='https://github.com/agrav/freesif', + use_scm_version=True, + packages=find_packages(exclude=['contrib', 'docs', 'tests*']), + package_data=dict(), + python_requires='~=3.7', + setup_requires=['setuptools_scm'], + install_requires=[ + 'tables>=3.6,<4', + 'numpy>=1.17,<2' + ], + zip_safe=True, + + # meta data + long_description=read('README.md'), + keywords='sesam structural hydrodynamic', + url='https://github.com/agrav/freesif', author='Audun Gravdal Johansen', @@ -21,13 +36,6 @@ classifiers=[ - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', - # 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ], - keywords='sesam structural hydrodynamic', - packages=find_packages(exclude=['contrib', 'docs', 'tests*']), - install_requires=['tables', 'numpy'], )
--- a/setup.py +++ b/setup.py @@ ... @@ from setuptools import setup, find_packages -from codecs import open # To use a consistent encoding -from os import path +import os -here = path.abspath(path.dirname(__file__)) -# Get the long description from the relevant file -with open(path.join(here, 'README.txt'), encoding='utf-8') as f: - long_description = f.read() +# Utility function to read the README file. +# Used for the long_description. It's nice, because now 1) we have a top level +# README file and 2) it's easier to type in the README file than to put a raw +# string in below ... +def read(fname): + return open(os.path.join(os.path.dirname(__file__), fname)).read() + setup( + # package data name='freesif', - version='0.1', description='Get data from Sesam Interface Files', - long_description=long_description, -# url='https://github.com/agrav/freesif', + use_scm_version=True, + packages=find_packages(exclude=['contrib', 'docs', 'tests*']), + package_data=dict(), + python_requires='~=3.7', + setup_requires=['setuptools_scm'], + install_requires=[ + 'tables>=3.6,<4', + 'numpy>=1.17,<2' + ], + zip_safe=True, + + # meta data + long_description=read('README.md'), + keywords='sesam structural hydrodynamic', + url='https://github.com/agrav/freesif', author='Audun Gravdal Johansen', @@ ... @@ classifiers=[ - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', - # 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ], - keywords='sesam structural hydrodynamic', - packages=find_packages(exclude=['contrib', 'docs', 'tests*']), - install_requires=['tables', 'numpy'], )
--- a/setup.py +++ b/setup.py @@ -2,17 +2,32 @@ CON from setuptools import setup, find_packages DEL from codecs import open # To use a consistent encoding DEL from os import path ADD import os CON DEL here = path.abspath(path.dirname(__file__)) CON DEL # Get the long description from the relevant file DEL with open(path.join(here, 'README.txt'), encoding='utf-8') as f: DEL long_description = f.read() ADD # Utility function to read the README file. ADD # Used for the long_description. It's nice, because now 1) we have a top level ADD # README file and 2) it's easier to type in the README file than to put a raw ADD # string in below ... ADD def read(fname): ADD return open(os.path.join(os.path.dirname(__file__), fname)).read() ADD CON CON setup( ADD # package data CON name='freesif', DEL version='0.1', CON description='Get data from Sesam Interface Files', DEL long_description=long_description, DEL # url='https://github.com/agrav/freesif', ADD use_scm_version=True, ADD packages=find_packages(exclude=['contrib', 'docs', 'tests*']), ADD package_data=dict(), ADD python_requires='~=3.7', ADD setup_requires=['setuptools_scm'], ADD install_requires=[ ADD 'tables>=3.6,<4', ADD 'numpy>=1.17,<2' ADD ], ADD zip_safe=True, ADD ADD # meta data ADD long_description=read('README.md'), ADD keywords='sesam structural hydrodynamic', ADD url='https://github.com/agrav/freesif', CON author='Audun Gravdal Johansen', @@ -21,13 +36,6 @@ CON classifiers=[ DEL # 3 - Alpha DEL # 4 - Beta DEL # 5 - Production/Stable DEL 'Development Status :: 3 - Alpha', ADD 'Development Status :: 4 - Beta', CON 'License :: OSI Approved :: MIT License', DEL # 'Programming Language :: Python :: 2.7', CON 'Programming Language :: Python :: 3.4', CON ], DEL keywords='sesam structural hydrodynamic', DEL packages=find_packages(exclude=['contrib', 'docs', 'tests*']), DEL install_requires=['tables', 'numpy'], CON )
<<<<<<< SEARCH # -*- coding: utf-8 -*- from setuptools import setup, find_packages from codecs import open # To use a consistent encoding from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the relevant file with open(path.join(here, 'README.txt'), encoding='utf-8') as f: long_description = f.read() setup( name='freesif', version='0.1', description='Get data from Sesam Interface Files', long_description=long_description, # url='https://github.com/agrav/freesif', author='Audun Gravdal Johansen', author_email='[email protected]', license='MIT', classifiers=[ # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: MIT License', # 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', ], keywords='sesam structural hydrodynamic', packages=find_packages(exclude=['contrib', 'docs', 'tests*']), install_requires=['tables', 'numpy'], ) ======= # -*- coding: utf-8 -*- from setuptools import setup, find_packages import os # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( # package data name='freesif', description='Get data from Sesam Interface Files', use_scm_version=True, packages=find_packages(exclude=['contrib', 'docs', 'tests*']), package_data=dict(), python_requires='~=3.7', setup_requires=['setuptools_scm'], install_requires=[ 'tables>=3.6,<4', 'numpy>=1.17,<2' ], zip_safe=True, # meta data long_description=read('README.md'), keywords='sesam structural hydrodynamic', url='https://github.com/agrav/freesif', author='Audun Gravdal Johansen', author_email='[email protected]', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.4', ], ) >>>>>>> REPLACE
openstack/nova
98c2c311ad1a0797205da58ce4d3b7d9b4c66c57
nova/policies/pause_server.py
python
apache-2.0
Introduce scope_types in pause server policy. oslo.policy introduced the scope_type feature, which can control the access level at the system level and project level. - https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope - http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html Appropriate scope_type for the nova case: - https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope This commit introduces scope_type for pause server API policies as: - ['system', 'project'] for the pause/unpause policy. Also adds the test case with scope_type enabled and verifies that we pass and fail the policy check with the expected context. Partial implement blueprint policy-defaults-refresh Change-Id: I828248ec42c71d67c8d9463d987d0afe54989c74
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-pause-server:%s' pause_server_policies = [ policy.DocumentedRuleDefault( POLICY_ROOT % 'pause', base.RULE_ADMIN_OR_OWNER, "Pause a server", [ { 'path': '/servers/{server_id}/action (pause)', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( POLICY_ROOT % 'unpause', base.RULE_ADMIN_OR_OWNER, "Unpause a paused server", [ { 'path': '/servers/{server_id}/action (unpause)', 'method': 'POST' } ] ), ] def list_rules(): return pause_server_policies
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base POLICY_ROOT = 'os_compute_api:os-pause-server:%s' pause_server_policies = [ policy.DocumentedRuleDefault( name=POLICY_ROOT % 'pause', check_str=base.RULE_ADMIN_OR_OWNER, description="Pause a server", operations=[ { 'path': '/servers/{server_id}/action (pause)', 'method': 'POST' } ], scope_types=['system', 'project'] ), policy.DocumentedRuleDefault( name=POLICY_ROOT % 'unpause', check_str=base.RULE_ADMIN_OR_OWNER, description="Unpause a paused server", operations=[ { 'path': '/servers/{server_id}/action (unpause)', 'method': 'POST' } ], scope_types=['system', 'project'] ), ] def list_rules(): return pause_server_policies
12
10
3
mixed
--- a/nova/policies/pause_server.py +++ b/nova/policies/pause_server.py @@ -25,6 +25,6 @@ policy.DocumentedRuleDefault( - POLICY_ROOT % 'pause', - base.RULE_ADMIN_OR_OWNER, - "Pause a server", - [ + name=POLICY_ROOT % 'pause', + check_str=base.RULE_ADMIN_OR_OWNER, + description="Pause a server", + operations=[ { @@ -33,9 +33,10 @@ } - ] + ], + scope_types=['system', 'project'] ), policy.DocumentedRuleDefault( - POLICY_ROOT % 'unpause', - base.RULE_ADMIN_OR_OWNER, - "Unpause a paused server", - [ + name=POLICY_ROOT % 'unpause', + check_str=base.RULE_ADMIN_OR_OWNER, + description="Unpause a paused server", + operations=[ { @@ -44,3 +45,4 @@ } - ] + ], + scope_types=['system', 'project'] ),
--- a/nova/policies/pause_server.py +++ b/nova/policies/pause_server.py @@ ... @@ policy.DocumentedRuleDefault( - POLICY_ROOT % 'pause', - base.RULE_ADMIN_OR_OWNER, - "Pause a server", - [ + name=POLICY_ROOT % 'pause', + check_str=base.RULE_ADMIN_OR_OWNER, + description="Pause a server", + operations=[ { @@ ... @@ } - ] + ], + scope_types=['system', 'project'] ), policy.DocumentedRuleDefault( - POLICY_ROOT % 'unpause', - base.RULE_ADMIN_OR_OWNER, - "Unpause a paused server", - [ + name=POLICY_ROOT % 'unpause', + check_str=base.RULE_ADMIN_OR_OWNER, + description="Unpause a paused server", + operations=[ { @@ ... @@ } - ] + ], + scope_types=['system', 'project'] ),
--- a/nova/policies/pause_server.py +++ b/nova/policies/pause_server.py @@ -25,6 +25,6 @@ CON policy.DocumentedRuleDefault( DEL POLICY_ROOT % 'pause', DEL base.RULE_ADMIN_OR_OWNER, DEL "Pause a server", DEL [ ADD name=POLICY_ROOT % 'pause', ADD check_str=base.RULE_ADMIN_OR_OWNER, ADD description="Pause a server", ADD operations=[ CON { @@ -33,9 +33,10 @@ CON } DEL ] ADD ], ADD scope_types=['system', 'project'] CON ), CON policy.DocumentedRuleDefault( DEL POLICY_ROOT % 'unpause', DEL base.RULE_ADMIN_OR_OWNER, DEL "Unpause a paused server", DEL [ ADD name=POLICY_ROOT % 'unpause', ADD check_str=base.RULE_ADMIN_OR_OWNER, ADD description="Unpause a paused server", ADD operations=[ CON { @@ -44,3 +45,4 @@ CON } DEL ] ADD ], ADD scope_types=['system', 'project'] CON ),
<<<<<<< SEARCH pause_server_policies = [ policy.DocumentedRuleDefault( POLICY_ROOT % 'pause', base.RULE_ADMIN_OR_OWNER, "Pause a server", [ { 'path': '/servers/{server_id}/action (pause)', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( POLICY_ROOT % 'unpause', base.RULE_ADMIN_OR_OWNER, "Unpause a paused server", [ { 'path': '/servers/{server_id}/action (unpause)', 'method': 'POST' } ] ), ] ======= pause_server_policies = [ policy.DocumentedRuleDefault( name=POLICY_ROOT % 'pause', check_str=base.RULE_ADMIN_OR_OWNER, description="Pause a server", operations=[ { 'path': '/servers/{server_id}/action (pause)', 'method': 'POST' } ], scope_types=['system', 'project'] ), policy.DocumentedRuleDefault( name=POLICY_ROOT % 'unpause', check_str=base.RULE_ADMIN_OR_OWNER, description="Unpause a paused server", operations=[ { 'path': '/servers/{server_id}/action (unpause)', 'method': 'POST' } ], scope_types=['system', 'project'] ), ] >>>>>>> REPLACE
nham/gymnosporangium
7f388d551a37a12e329dadf3f692905f6038f0e3
src/traversal.rs
rust
mit
Change bfs to not traverse all the connected components.
use {HashSet}; use std::collections::{HashMap, RingBuf, Deque}; use graph::{Graph, NodeIndex, Digraph}; /// Do a breadth-first search of the graph, returning the resulting breadth- /// first forest. fn bfs_forest<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { let mut forest = Digraph::new(); if g.num_nodes() == 0 { return forest; } let mut unvisited = HashSet::new(); let mut visited = HashSet::new(); let mut discovered = RingBuf::new(); for i in g.node_indices() { unvisited.insert(i); } discovered.push_back((start, None)); loop { match discovered.pop_front() { None => { if unvisited.len() == 0 { break; } else { let another = unvisited.iter().next().unwrap(); discovered.push_back((*another, None)); continue; } }, Some((ind, parent)) => { forest.add_node(ind); if parent.is_some() { forest.add_edge(parent.unwrap(), ind); } visited.insert(ind); unvisited.remove(&ind); for i in g.adj(ind) { if !visited.contains(&i) { discovered.push_back((i, Some(ind))); } } } } } return forest; }
use {HashSet}; use std::collections::{HashMap, RingBuf, Deque}; use graph::{Graph, NodeIndex, Digraph}; /// Do a breadth-first search of the graph, returning the resulting breadth- /// first tree (a tree on the connected component containing the stard node) fn bfs_tree<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { let mut tree = Digraph::new(); if g.num_nodes() == 0 { return tree; } let mut visited = HashSet::new(); let mut discovered = RingBuf::new(); discovered.push_back((start, None)); loop { match discovered.pop_front() { None => break, Some((ind, parent)) => { tree.add_node(ind); if parent.is_some() { tree.add_edge(parent.unwrap(), ind); } visited.insert(ind); for i in g.adj(ind) { if !visited.contains(&i) { discovered.push_back((i, Some(ind))); } } } } } return tree; }
8
22
3
mixed
--- a/src/traversal.rs +++ b/src/traversal.rs @@ -5,17 +5,12 @@ /// Do a breadth-first search of the graph, returning the resulting breadth- -/// first forest. -fn bfs_forest<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { - let mut forest = Digraph::new(); +/// first tree (a tree on the connected component containing the stard node) +fn bfs_tree<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { + let mut tree = Digraph::new(); if g.num_nodes() == 0 { - return forest; + return tree; } - let mut unvisited = HashSet::new(); let mut visited = HashSet::new(); let mut discovered = RingBuf::new(); - - for i in g.node_indices() { - unvisited.insert(i); - } @@ -24,18 +19,9 @@ match discovered.pop_front() { - None => { - if unvisited.len() == 0 { - break; - } else { - let another = unvisited.iter().next().unwrap(); - discovered.push_back((*another, None)); - continue; - } - }, + None => break, Some((ind, parent)) => { - forest.add_node(ind); + tree.add_node(ind); if parent.is_some() { - forest.add_edge(parent.unwrap(), ind); + tree.add_edge(parent.unwrap(), ind); } visited.insert(ind); - unvisited.remove(&ind); @@ -49,3 +35,3 @@ } - return forest; + return tree; }
--- a/src/traversal.rs +++ b/src/traversal.rs @@ ... @@ /// Do a breadth-first search of the graph, returning the resulting breadth- -/// first forest. -fn bfs_forest<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { - let mut forest = Digraph::new(); +/// first tree (a tree on the connected component containing the stard node) +fn bfs_tree<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { + let mut tree = Digraph::new(); if g.num_nodes() == 0 { - return forest; + return tree; } - let mut unvisited = HashSet::new(); let mut visited = HashSet::new(); let mut discovered = RingBuf::new(); - - for i in g.node_indices() { - unvisited.insert(i); - } @@ ... @@ match discovered.pop_front() { - None => { - if unvisited.len() == 0 { - break; - } else { - let another = unvisited.iter().next().unwrap(); - discovered.push_back((*another, None)); - continue; - } - }, + None => break, Some((ind, parent)) => { - forest.add_node(ind); + tree.add_node(ind); if parent.is_some() { - forest.add_edge(parent.unwrap(), ind); + tree.add_edge(parent.unwrap(), ind); } visited.insert(ind); - unvisited.remove(&ind); @@ ... @@ } - return forest; + return tree; }
--- a/src/traversal.rs +++ b/src/traversal.rs @@ -5,17 +5,12 @@ CON /// Do a breadth-first search of the graph, returning the resulting breadth- DEL /// first forest. DEL fn bfs_forest<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { DEL let mut forest = Digraph::new(); ADD /// first tree (a tree on the connected component containing the stard node) ADD fn bfs_tree<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { ADD let mut tree = Digraph::new(); CON CON if g.num_nodes() == 0 { DEL return forest; ADD return tree; CON } CON DEL let mut unvisited = HashSet::new(); CON let mut visited = HashSet::new(); CON let mut discovered = RingBuf::new(); DEL DEL for i in g.node_indices() { DEL unvisited.insert(i); DEL } CON @@ -24,18 +19,9 @@ CON match discovered.pop_front() { DEL None => { DEL if unvisited.len() == 0 { DEL break; DEL } else { DEL let another = unvisited.iter().next().unwrap(); DEL discovered.push_back((*another, None)); DEL continue; DEL } DEL }, ADD None => break, CON Some((ind, parent)) => { DEL forest.add_node(ind); ADD tree.add_node(ind); CON if parent.is_some() { DEL forest.add_edge(parent.unwrap(), ind); ADD tree.add_edge(parent.unwrap(), ind); CON } CON visited.insert(ind); DEL unvisited.remove(&ind); CON @@ -49,3 +35,3 @@ CON } DEL return forest; ADD return tree; CON }
<<<<<<< SEARCH /// Do a breadth-first search of the graph, returning the resulting breadth- /// first forest. fn bfs_forest<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { let mut forest = Digraph::new(); if g.num_nodes() == 0 { return forest; } let mut unvisited = HashSet::new(); let mut visited = HashSet::new(); let mut discovered = RingBuf::new(); for i in g.node_indices() { unvisited.insert(i); } discovered.push_back((start, None)); loop { match discovered.pop_front() { None => { if unvisited.len() == 0 { break; } else { let another = unvisited.iter().next().unwrap(); discovered.push_back((*another, None)); continue; } }, Some((ind, parent)) => { forest.add_node(ind); if parent.is_some() { forest.add_edge(parent.unwrap(), ind); } visited.insert(ind); unvisited.remove(&ind); for i in g.adj(ind) { ======= /// Do a breadth-first search of the graph, returning the resulting breadth- /// first tree (a tree on the connected component containing the stard node) fn bfs_tree<T, G: Graph<T>>(g: &G, start: NodeIndex) -> Digraph<NodeIndex> { let mut tree = Digraph::new(); if g.num_nodes() == 0 { return tree; } let mut visited = HashSet::new(); let mut discovered = RingBuf::new(); discovered.push_back((start, None)); loop { match discovered.pop_front() { None => break, Some((ind, parent)) => { tree.add_node(ind); if parent.is_some() { tree.add_edge(parent.unwrap(), ind); } visited.insert(ind); for i in g.adj(ind) { >>>>>>> REPLACE <<<<<<< SEARCH } } return forest; } ======= } } return tree; } >>>>>>> REPLACE
Freeyourgadget/Gadgetbridge
89bf63d54085e4fe86a861079b0f33b0c7b4d86d
app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/MusicStateSpec.java
java
agpl-3.0
Implement hashCode() when you implement equals()!
package nodomain.freeyourgadget.gadgetbridge.model; /** * Created by steffen on 07.06.16. */ public class MusicStateSpec { public static final int STATE_PLAYING = 0; public static final int STATE_PAUSED = 1; public static final int STATE_STOPPED = 2; public static final int STATE_UNKNOWN = 3; public byte state; public int position; public int playRate; public byte shuffle; public byte repeat; public MusicStateSpec() { } public MusicStateSpec(MusicStateSpec old) { this.state = old.state; this.position = old.position; this.playRate = old.playRate; this.shuffle = old.shuffle; this.repeat = old.repeat; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof MusicStateSpec)) { return false; } MusicStateSpec stateSpec = (MusicStateSpec) obj; return this.state == stateSpec.state && Math.abs(this.position - stateSpec.position)<=2 && this.playRate == stateSpec.playRate && this.shuffle == stateSpec.shuffle && this.repeat == stateSpec.repeat; } }
package nodomain.freeyourgadget.gadgetbridge.model; /** * Created by steffen on 07.06.16. */ public class MusicStateSpec { public static final int STATE_PLAYING = 0; public static final int STATE_PAUSED = 1; public static final int STATE_STOPPED = 2; public static final int STATE_UNKNOWN = 3; public byte state; public int position; public int playRate; public byte shuffle; public byte repeat; public MusicStateSpec() { } public MusicStateSpec(MusicStateSpec old) { this.state = old.state; this.position = old.position; this.playRate = old.playRate; this.shuffle = old.shuffle; this.repeat = old.repeat; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof MusicStateSpec)) { return false; } MusicStateSpec stateSpec = (MusicStateSpec) obj; return this.state == stateSpec.state && Math.abs(this.position - stateSpec.position)<=2 && this.playRate == stateSpec.playRate && this.shuffle == stateSpec.shuffle && this.repeat == stateSpec.repeat; } @Override public int hashCode() { int result = (int) state; // ignore the position -- it is taken into account in equals() // result = 31 * result + position; result = 31 * result + playRate; result = 31 * result + (int) shuffle; result = 31 * result + (int) repeat; return result; } }
11
0
1
add_only
--- a/app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/MusicStateSpec.java +++ b/app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/MusicStateSpec.java @@ -45,2 +45,13 @@ } + + @Override + public int hashCode() { + int result = (int) state; +// ignore the position -- it is taken into account in equals() +// result = 31 * result + position; + result = 31 * result + playRate; + result = 31 * result + (int) shuffle; + result = 31 * result + (int) repeat; + return result; + } }
--- a/app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/MusicStateSpec.java +++ b/app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/MusicStateSpec.java @@ ... @@ } + + @Override + public int hashCode() { + int result = (int) state; +// ignore the position -- it is taken into account in equals() +// result = 31 * result + position; + result = 31 * result + playRate; + result = 31 * result + (int) shuffle; + result = 31 * result + (int) repeat; + return result; + } }
--- a/app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/MusicStateSpec.java +++ b/app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/MusicStateSpec.java @@ -45,2 +45,13 @@ CON } ADD ADD @Override ADD public int hashCode() { ADD int result = (int) state; ADD // ignore the position -- it is taken into account in equals() ADD // result = 31 * result + position; ADD result = 31 * result + playRate; ADD result = 31 * result + (int) shuffle; ADD result = 31 * result + (int) repeat; ADD return result; ADD } CON }
<<<<<<< SEARCH this.repeat == stateSpec.repeat; } } ======= this.repeat == stateSpec.repeat; } @Override public int hashCode() { int result = (int) state; // ignore the position -- it is taken into account in equals() // result = 31 * result + position; result = 31 * result + playRate; result = 31 * result + (int) shuffle; result = 31 * result + (int) repeat; return result; } } >>>>>>> REPLACE
blindpirate/gradle
65401d90fa02c6a5cc6a7ae53eaa58cdb028c77c
src/test/kotlin/org/gradle/script/lang/kotlin/support/KotlinCompilerTest.kt
kotlin
apache-2.0
Improve pop culture reference in test case
package org.gradle.script.lang.kotlin.support import org.gradle.script.lang.kotlin.TestWithTempFiles import org.gradle.script.lang.kotlin.loggerFor import org.hamcrest.CoreMatchers.equalTo import org.hamcrest.MatcherAssert.assertThat import org.junit.Test import java.io.File import java.net.URLClassLoader class KotlinCompilerTest : TestWithTempFiles() { @Test fun `can compile Kotlin source file into jar`() { val sourceFile = newFile("DeepThought.kt").apply { writeText(""" package adams class DeepThought { fun compute(): Int = 42 } """) } val outputJar = newFile("output.jar") compileToJar(outputJar, sourceFile, loggerFor<KotlinCompilerTest>()) val answer = classLoaderFor(outputJar) .loadClass("adams.DeepThought") .newInstance() .run { javaClass.getMethod("compute").invoke(this) } assertThat( answer, equalTo<Any>(42)) } private fun classLoaderFor(outputJar: File) = URLClassLoader.newInstance( arrayOf(outputJar.toURI().toURL())) }
package org.gradle.script.lang.kotlin.support import org.gradle.script.lang.kotlin.TestWithTempFiles import org.gradle.script.lang.kotlin.loggerFor import org.hamcrest.CoreMatchers.equalTo import org.hamcrest.MatcherAssert.assertThat import org.junit.Test import java.io.File import java.net.URLClassLoader class KotlinCompilerTest : TestWithTempFiles() { @Test fun `can compile Kotlin source file into jar`() { val sourceFile = newFile("DeepThought.kt").apply { writeText(""" package hhgttg class DeepThought { fun compute(): Int = 42 } """) } val outputJar = newFile("output.jar") compileToJar(outputJar, sourceFile, loggerFor<KotlinCompilerTest>()) val answer = classLoaderFor(outputJar) .loadClass("hhgttg.DeepThought") .newInstance() .run { javaClass.getMethod("compute").invoke(this) } assertThat( answer, equalTo<Any>(42)) } private fun classLoaderFor(outputJar: File) = URLClassLoader.newInstance( arrayOf(outputJar.toURI().toURL())) }
2
2
2
mixed
--- a/src/test/kotlin/org/gradle/script/lang/kotlin/support/KotlinCompilerTest.kt +++ b/src/test/kotlin/org/gradle/script/lang/kotlin/support/KotlinCompilerTest.kt @@ -21,3 +21,3 @@ writeText(""" - package adams + package hhgttg @@ -35,3 +35,3 @@ classLoaderFor(outputJar) - .loadClass("adams.DeepThought") + .loadClass("hhgttg.DeepThought") .newInstance()
--- a/src/test/kotlin/org/gradle/script/lang/kotlin/support/KotlinCompilerTest.kt +++ b/src/test/kotlin/org/gradle/script/lang/kotlin/support/KotlinCompilerTest.kt @@ ... @@ writeText(""" - package adams + package hhgttg @@ ... @@ classLoaderFor(outputJar) - .loadClass("adams.DeepThought") + .loadClass("hhgttg.DeepThought") .newInstance()
--- a/src/test/kotlin/org/gradle/script/lang/kotlin/support/KotlinCompilerTest.kt +++ b/src/test/kotlin/org/gradle/script/lang/kotlin/support/KotlinCompilerTest.kt @@ -21,3 +21,3 @@ CON writeText(""" DEL package adams ADD package hhgttg CON @@ -35,3 +35,3 @@ CON classLoaderFor(outputJar) DEL .loadClass("adams.DeepThought") ADD .loadClass("hhgttg.DeepThought") CON .newInstance()
<<<<<<< SEARCH val sourceFile = newFile("DeepThought.kt").apply { writeText(""" package adams class DeepThought { ======= val sourceFile = newFile("DeepThought.kt").apply { writeText(""" package hhgttg class DeepThought { >>>>>>> REPLACE <<<<<<< SEARCH val answer = classLoaderFor(outputJar) .loadClass("adams.DeepThought") .newInstance() .run { ======= val answer = classLoaderFor(outputJar) .loadClass("hhgttg.DeepThought") .newInstance() .run { >>>>>>> REPLACE
mattgreen/watchexec
0b5120430cd73059d40145602383cf2a39012030
src/lib.rs
rust
apache-2.0
Revise clippy lints to avoid breakage
//! Watchexec: the library //! //! This is the library version of the CLI tool [watchexec]. The tool is //! implemented with this library, but the purpose of the watchexec project is //! to deliver the CLI tool, instead of focusing on the library interface first //! and foremost. **For this reason, semver guarantees do _not_ apply to this //! library.** Please use exact version matching, as this API may break even //! between patch point releases. This policy may change in the future. //! //! [watchexec]: https://github.com/watchexec/watchexec #![forbid( clippy::pedantic, clippy::nursery, deprecated, intra_doc_link_resolution_failure, clippy::option_unwrap_used, clippy::result_unwrap_used )] #![deny(unsafe_code, clippy::missing_const_for_fn, clippy::redundant_clone)] #![allow( clippy::default_trait_access, clippy::cognitive_complexity, clippy::too_many_lines )] #[macro_use] extern crate clap; #[macro_use] extern crate derive_builder; #[macro_use] extern crate log; #[macro_use] extern crate lazy_static; pub mod cli; pub mod error; mod gitignore; mod ignore; mod notification_filter; pub mod pathop; mod process; pub mod run; mod signal; mod watcher; pub use cli::{Args, ArgsBuilder}; pub use run::{run, watch, Handler};
//! Watchexec: the library //! //! This is the library version of the CLI tool [watchexec]. The tool is //! implemented with this library, but the purpose of the watchexec project is //! to deliver the CLI tool, instead of focusing on the library interface first //! and foremost. **For this reason, semver guarantees do _not_ apply to this //! library.** Please use exact version matching, as this API may break even //! between patch point releases. This policy may change in the future. //! //! [watchexec]: https://github.com/watchexec/watchexec #![forbid(deprecated)] #![warn( clippy::all, clippy::missing_const_for_fn, clippy::option_unwrap_used, clippy::result_unwrap_used, intra_doc_link_resolution_failure )] #[macro_use] extern crate clap; #[macro_use] extern crate derive_builder; #[macro_use] extern crate log; #[macro_use] extern crate lazy_static; pub mod cli; pub mod error; mod gitignore; mod ignore; mod notification_filter; pub mod pathop; mod process; pub mod run; mod signal; mod watcher; pub use cli::{Args, ArgsBuilder}; pub use run::{run, watch, Handler};
6
12
1
mixed
--- a/src/lib.rs +++ b/src/lib.rs @@ -11,15 +11,9 @@ -#![forbid( - clippy::pedantic, - clippy::nursery, - deprecated, - intra_doc_link_resolution_failure, +#![forbid(deprecated)] +#![warn( + clippy::all, + clippy::missing_const_for_fn, clippy::option_unwrap_used, - clippy::result_unwrap_used -)] -#![deny(unsafe_code, clippy::missing_const_for_fn, clippy::redundant_clone)] -#![allow( - clippy::default_trait_access, - clippy::cognitive_complexity, - clippy::too_many_lines + clippy::result_unwrap_used, + intra_doc_link_resolution_failure )]
--- a/src/lib.rs +++ b/src/lib.rs @@ ... @@ -#![forbid( - clippy::pedantic, - clippy::nursery, - deprecated, - intra_doc_link_resolution_failure, +#![forbid(deprecated)] +#![warn( + clippy::all, + clippy::missing_const_for_fn, clippy::option_unwrap_used, - clippy::result_unwrap_used -)] -#![deny(unsafe_code, clippy::missing_const_for_fn, clippy::redundant_clone)] -#![allow( - clippy::default_trait_access, - clippy::cognitive_complexity, - clippy::too_many_lines + clippy::result_unwrap_used, + intra_doc_link_resolution_failure )]
--- a/src/lib.rs +++ b/src/lib.rs @@ -11,15 +11,9 @@ CON DEL #![forbid( DEL clippy::pedantic, DEL clippy::nursery, DEL deprecated, DEL intra_doc_link_resolution_failure, ADD #![forbid(deprecated)] ADD #![warn( ADD clippy::all, ADD clippy::missing_const_for_fn, CON clippy::option_unwrap_used, DEL clippy::result_unwrap_used DEL )] DEL #![deny(unsafe_code, clippy::missing_const_for_fn, clippy::redundant_clone)] DEL #![allow( DEL clippy::default_trait_access, DEL clippy::cognitive_complexity, DEL clippy::too_many_lines ADD clippy::result_unwrap_used, ADD intra_doc_link_resolution_failure CON )]
<<<<<<< SEARCH //! [watchexec]: https://github.com/watchexec/watchexec #![forbid( clippy::pedantic, clippy::nursery, deprecated, intra_doc_link_resolution_failure, clippy::option_unwrap_used, clippy::result_unwrap_used )] #![deny(unsafe_code, clippy::missing_const_for_fn, clippy::redundant_clone)] #![allow( clippy::default_trait_access, clippy::cognitive_complexity, clippy::too_many_lines )] ======= //! [watchexec]: https://github.com/watchexec/watchexec #![forbid(deprecated)] #![warn( clippy::all, clippy::missing_const_for_fn, clippy::option_unwrap_used, clippy::result_unwrap_used, intra_doc_link_resolution_failure )] >>>>>>> REPLACE
defuz/rust
ee06ed2bfd233b57c5989696bb723bddf6569622
src/libcore/rt/work_queue.rs
rust
apache-2.0
core::rt: Put a lock on the work queue
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use container::Container; use option::*; use vec::OwnedVector; pub struct WorkQueue<T> { priv queue: ~[T] } pub impl<T> WorkQueue<T> { fn new() -> WorkQueue<T> { WorkQueue { queue: ~[] } } fn push(&mut self, value: T) { self.queue.unshift(value) } fn pop(&mut self) -> Option<T> { if !self.queue.is_empty() { Some(self.queue.shift()) } else { None } } fn steal(&mut self) -> Option<T> { if !self.queue.is_empty() { Some(self.queue.pop()) } else { None } } fn is_empty(&self) -> bool { return self.queue.is_empty(); } }
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use container::Container; use option::*; use vec::OwnedVector; use unstable::sync::{Exclusive, exclusive}; use cell::Cell; use kinds::Owned; pub struct WorkQueue<T> { // XXX: Another mystery bug fixed by boxing this lock priv queue: ~Exclusive<~[T]> } pub impl<T: Owned> WorkQueue<T> { fn new() -> WorkQueue<T> { WorkQueue { queue: ~exclusive(~[]) } } fn push(&mut self, value: T) { let value = Cell(value); self.queue.with(|q| q.unshift(value.take()) ); } fn pop(&mut self) -> Option<T> { do self.queue.with |q| { if !q.is_empty() { Some(q.shift()) } else { None } } } fn steal(&mut self) -> Option<T> { do self.queue.with |q| { if !q.is_empty() { Some(q.pop()) } else { None } } } fn is_empty(&self) -> bool { self.queue.with_imm(|q| q.is_empty() ) } }
22
13
5
mixed
--- a/src/libcore/rt/work_queue.rs +++ b/src/libcore/rt/work_queue.rs @@ -13,11 +13,15 @@ use vec::OwnedVector; +use unstable::sync::{Exclusive, exclusive}; +use cell::Cell; +use kinds::Owned; pub struct WorkQueue<T> { - priv queue: ~[T] + // XXX: Another mystery bug fixed by boxing this lock + priv queue: ~Exclusive<~[T]> } -pub impl<T> WorkQueue<T> { +pub impl<T: Owned> WorkQueue<T> { fn new() -> WorkQueue<T> { WorkQueue { - queue: ~[] + queue: ~exclusive(~[]) } @@ -26,3 +30,4 @@ fn push(&mut self, value: T) { - self.queue.unshift(value) + let value = Cell(value); + self.queue.with(|q| q.unshift(value.take()) ); } @@ -30,6 +35,8 @@ fn pop(&mut self) -> Option<T> { - if !self.queue.is_empty() { - Some(self.queue.shift()) - } else { - None + do self.queue.with |q| { + if !q.is_empty() { + Some(q.shift()) + } else { + None + } } @@ -38,6 +45,8 @@ fn steal(&mut self) -> Option<T> { - if !self.queue.is_empty() { - Some(self.queue.pop()) - } else { - None + do self.queue.with |q| { + if !q.is_empty() { + Some(q.pop()) + } else { + None + } } @@ -46,3 +55,3 @@ fn is_empty(&self) -> bool { - return self.queue.is_empty(); + self.queue.with_imm(|q| q.is_empty() ) }
--- a/src/libcore/rt/work_queue.rs +++ b/src/libcore/rt/work_queue.rs @@ ... @@ use vec::OwnedVector; +use unstable::sync::{Exclusive, exclusive}; +use cell::Cell; +use kinds::Owned; pub struct WorkQueue<T> { - priv queue: ~[T] + // XXX: Another mystery bug fixed by boxing this lock + priv queue: ~Exclusive<~[T]> } -pub impl<T> WorkQueue<T> { +pub impl<T: Owned> WorkQueue<T> { fn new() -> WorkQueue<T> { WorkQueue { - queue: ~[] + queue: ~exclusive(~[]) } @@ ... @@ fn push(&mut self, value: T) { - self.queue.unshift(value) + let value = Cell(value); + self.queue.with(|q| q.unshift(value.take()) ); } @@ ... @@ fn pop(&mut self) -> Option<T> { - if !self.queue.is_empty() { - Some(self.queue.shift()) - } else { - None + do self.queue.with |q| { + if !q.is_empty() { + Some(q.shift()) + } else { + None + } } @@ ... @@ fn steal(&mut self) -> Option<T> { - if !self.queue.is_empty() { - Some(self.queue.pop()) - } else { - None + do self.queue.with |q| { + if !q.is_empty() { + Some(q.pop()) + } else { + None + } } @@ ... @@ fn is_empty(&self) -> bool { - return self.queue.is_empty(); + self.queue.with_imm(|q| q.is_empty() ) }
--- a/src/libcore/rt/work_queue.rs +++ b/src/libcore/rt/work_queue.rs @@ -13,11 +13,15 @@ CON use vec::OwnedVector; ADD use unstable::sync::{Exclusive, exclusive}; ADD use cell::Cell; ADD use kinds::Owned; CON CON pub struct WorkQueue<T> { DEL priv queue: ~[T] ADD // XXX: Another mystery bug fixed by boxing this lock ADD priv queue: ~Exclusive<~[T]> CON } CON DEL pub impl<T> WorkQueue<T> { ADD pub impl<T: Owned> WorkQueue<T> { CON fn new() -> WorkQueue<T> { CON WorkQueue { DEL queue: ~[] ADD queue: ~exclusive(~[]) CON } @@ -26,3 +30,4 @@ CON fn push(&mut self, value: T) { DEL self.queue.unshift(value) ADD let value = Cell(value); ADD self.queue.with(|q| q.unshift(value.take()) ); CON } @@ -30,6 +35,8 @@ CON fn pop(&mut self) -> Option<T> { DEL if !self.queue.is_empty() { DEL Some(self.queue.shift()) DEL } else { DEL None ADD do self.queue.with |q| { ADD if !q.is_empty() { ADD Some(q.shift()) ADD } else { ADD None ADD } CON } @@ -38,6 +45,8 @@ CON fn steal(&mut self) -> Option<T> { DEL if !self.queue.is_empty() { DEL Some(self.queue.pop()) DEL } else { DEL None ADD do self.queue.with |q| { ADD if !q.is_empty() { ADD Some(q.pop()) ADD } else { ADD None ADD } CON } @@ -46,3 +55,3 @@ CON fn is_empty(&self) -> bool { DEL return self.queue.is_empty(); ADD self.queue.with_imm(|q| q.is_empty() ) CON }
<<<<<<< SEARCH use option::*; use vec::OwnedVector; pub struct WorkQueue<T> { priv queue: ~[T] } pub impl<T> WorkQueue<T> { fn new() -> WorkQueue<T> { WorkQueue { queue: ~[] } } fn push(&mut self, value: T) { self.queue.unshift(value) } fn pop(&mut self) -> Option<T> { if !self.queue.is_empty() { Some(self.queue.shift()) } else { None } } fn steal(&mut self) -> Option<T> { if !self.queue.is_empty() { Some(self.queue.pop()) } else { None } } fn is_empty(&self) -> bool { return self.queue.is_empty(); } } ======= use option::*; use vec::OwnedVector; use unstable::sync::{Exclusive, exclusive}; use cell::Cell; use kinds::Owned; pub struct WorkQueue<T> { // XXX: Another mystery bug fixed by boxing this lock priv queue: ~Exclusive<~[T]> } pub impl<T: Owned> WorkQueue<T> { fn new() -> WorkQueue<T> { WorkQueue { queue: ~exclusive(~[]) } } fn push(&mut self, value: T) { let value = Cell(value); self.queue.with(|q| q.unshift(value.take()) ); } fn pop(&mut self) -> Option<T> { do self.queue.with |q| { if !q.is_empty() { Some(q.shift()) } else { None } } } fn steal(&mut self) -> Option<T> { do self.queue.with |q| { if !q.is_empty() { Some(q.pop()) } else { None } } } fn is_empty(&self) -> bool { self.queue.with_imm(|q| q.is_empty() ) } } >>>>>>> REPLACE
saltstack/salt
baacda228682a50acc5a4528d43f5d3a88c7c6ec
salt/client/netapi.py
python
apache-2.0
Make sure to not leave hanging children processes if the parent is killed
# encoding: utf-8 ''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) multiprocessing.Process(target=netapi[fun]).start()
# encoding: utf-8 ''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing import signal # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.processes = [] def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) p = multiprocessing.Process(target=netapi[fun]) p.start() self.processes.append(p) # make sure to kill the subprocesses if the parent is killed signal.signal(signal.SIGTERM, self.kill_children) def kill_children(self, *args): ''' Kill all of the children ''' for p in self.processes: p.terminate() p.join()
16
1
3
mixed
--- a/salt/client/netapi.py +++ b/salt/client/netapi.py @@ -7,2 +7,3 @@ import multiprocessing +import signal @@ -20,2 +21,3 @@ self.opts = opts + self.processes = [] @@ -29,2 +31,15 @@ logger.info("Starting '{0}' api module".format(fun)) - multiprocessing.Process(target=netapi[fun]).start() + p = multiprocessing.Process(target=netapi[fun]) + p.start() + self.processes.append(p) + + # make sure to kill the subprocesses if the parent is killed + signal.signal(signal.SIGTERM, self.kill_children) + + def kill_children(self, *args): + ''' + Kill all of the children + ''' + for p in self.processes: + p.terminate() + p.join()
--- a/salt/client/netapi.py +++ b/salt/client/netapi.py @@ ... @@ import multiprocessing +import signal @@ ... @@ self.opts = opts + self.processes = [] @@ ... @@ logger.info("Starting '{0}' api module".format(fun)) - multiprocessing.Process(target=netapi[fun]).start() + p = multiprocessing.Process(target=netapi[fun]) + p.start() + self.processes.append(p) + + # make sure to kill the subprocesses if the parent is killed + signal.signal(signal.SIGTERM, self.kill_children) + + def kill_children(self, *args): + ''' + Kill all of the children + ''' + for p in self.processes: + p.terminate() + p.join()
--- a/salt/client/netapi.py +++ b/salt/client/netapi.py @@ -7,2 +7,3 @@ CON import multiprocessing ADD import signal CON @@ -20,2 +21,3 @@ CON self.opts = opts ADD self.processes = [] CON @@ -29,2 +31,15 @@ CON logger.info("Starting '{0}' api module".format(fun)) DEL multiprocessing.Process(target=netapi[fun]).start() ADD p = multiprocessing.Process(target=netapi[fun]) ADD p.start() ADD self.processes.append(p) ADD ADD # make sure to kill the subprocesses if the parent is killed ADD signal.signal(signal.SIGTERM, self.kill_children) ADD ADD def kill_children(self, *args): ADD ''' ADD Kill all of the children ADD ''' ADD for p in self.processes: ADD p.terminate() ADD p.join()
<<<<<<< SEARCH import logging import multiprocessing # Import salt-api libs ======= import logging import multiprocessing import signal # Import salt-api libs >>>>>>> REPLACE <<<<<<< SEARCH def __init__(self, opts): self.opts = opts def run(self): ======= def __init__(self, opts): self.opts = opts self.processes = [] def run(self): >>>>>>> REPLACE <<<<<<< SEARCH if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) multiprocessing.Process(target=netapi[fun]).start() ======= if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) p = multiprocessing.Process(target=netapi[fun]) p.start() self.processes.append(p) # make sure to kill the subprocesses if the parent is killed signal.signal(signal.SIGTERM, self.kill_children) def kill_children(self, *args): ''' Kill all of the children ''' for p in self.processes: p.terminate() p.join() >>>>>>> REPLACE
lm-tools/sectors
f9365594e415e9485dc6a4435cb14b3eaf8b9c58
situational/settings/heroku.py
python
bsd-3-clause
Remove RedisCloud URL from Heroku
import os from .base import * DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Sym Roe', '[email protected]'), ) MANAGERS = ADMINS ALLOWED_HOSTS = ['.herokuapp.com'] INTERNAL_IPS = () # Redirect any non-HTTP request to HTTPS SECURE_SSL_REDIRECT = True SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Only allow sessions when serving the site over HTTPS SESSION_COOKIE_SECURE = True # Only send CSRF protection cookies when serving the site over HTTPS CSRF_COOKIE_SECURE = True # Use the X-Request=ID HTTP Header as the request ID LOG_REQUEST_ID_HEADER = "HTTP_X_REQUEST_ID" import dj_database_url DATABASES['default'] = dj_database_url.config() DATABASES['default']['ENGINE'] = 'django_postgrespool' REDIS_URL = os.environ['REDISCLOUD_URL'] EMAIL_HOST = os.environ['MAILGUN_SMTP_SERVER'] EMAIL_HOST_USER = os.environ['MAILGUN_SMTP_LOGIN'] EMAIL_HOST_PASSWORD = os.environ['MAILGUN_SMTP_PASSWORD'] EMAIL_PORT = os.environ['MAILGUN_SMTP_PORT'] EMAIL_USE_TLS = True
import os from .base import * DEBUG = False TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Sym Roe', '[email protected]'), ) MANAGERS = ADMINS ALLOWED_HOSTS = ['.herokuapp.com'] INTERNAL_IPS = () # Redirect any non-HTTP request to HTTPS SECURE_SSL_REDIRECT = True SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Only allow sessions when serving the site over HTTPS SESSION_COOKIE_SECURE = True # Only send CSRF protection cookies when serving the site over HTTPS CSRF_COOKIE_SECURE = True # Use the X-Request=ID HTTP Header as the request ID LOG_REQUEST_ID_HEADER = "HTTP_X_REQUEST_ID" import dj_database_url DATABASES['default'] = dj_database_url.config() DATABASES['default']['ENGINE'] = 'django_postgrespool' EMAIL_HOST = os.environ['MAILGUN_SMTP_SERVER'] EMAIL_HOST_USER = os.environ['MAILGUN_SMTP_LOGIN'] EMAIL_HOST_PASSWORD = os.environ['MAILGUN_SMTP_PASSWORD'] EMAIL_PORT = os.environ['MAILGUN_SMTP_PORT'] EMAIL_USE_TLS = True
0
2
1
del_only
--- a/situational/settings/heroku.py +++ b/situational/settings/heroku.py @@ -33,4 +33,2 @@ -REDIS_URL = os.environ['REDISCLOUD_URL'] - EMAIL_HOST = os.environ['MAILGUN_SMTP_SERVER']
--- a/situational/settings/heroku.py +++ b/situational/settings/heroku.py @@ ... @@ -REDIS_URL = os.environ['REDISCLOUD_URL'] - EMAIL_HOST = os.environ['MAILGUN_SMTP_SERVER']
--- a/situational/settings/heroku.py +++ b/situational/settings/heroku.py @@ -33,4 +33,2 @@ CON DEL REDIS_URL = os.environ['REDISCLOUD_URL'] DEL CON EMAIL_HOST = os.environ['MAILGUN_SMTP_SERVER']
<<<<<<< SEARCH DATABASES['default']['ENGINE'] = 'django_postgrespool' REDIS_URL = os.environ['REDISCLOUD_URL'] EMAIL_HOST = os.environ['MAILGUN_SMTP_SERVER'] EMAIL_HOST_USER = os.environ['MAILGUN_SMTP_LOGIN'] ======= DATABASES['default']['ENGINE'] = 'django_postgrespool' EMAIL_HOST = os.environ['MAILGUN_SMTP_SERVER'] EMAIL_HOST_USER = os.environ['MAILGUN_SMTP_LOGIN'] >>>>>>> REPLACE
bcb/jsonrpcclient
eb2827a94e477c64eeb67076288007326c34a2f9
setup.py
python
mit
Add aiohttpClient plus example usage Closes #20
"""setup.py""" from codecs import open as codecs_open from setuptools import setup with codecs_open('README.rst', 'r', 'utf-8') as f: __README = f.read() with codecs_open('HISTORY.rst', 'r', 'utf-8') as f: __HISTORY = f.read() setup( name='jsonrpcclient', version='2.2.4', description='Send JSON-RPC requests', long_description=__README+'\n\n'+__HISTORY, author='Beau Barker', author_email='[email protected]', url='https://jsonrpcclient.readthedocs.io/', license='MIT', packages=['jsonrpcclient'], package_data={'jsonrpcclient': ['response-schema.json']}, include_package_data=True, install_requires=['future', 'jsonschema'], extras_require={ 'requests': ['requests'], 'requests_security': ['requests[security]'], 'zmq': ['pyzmq'], 'tornado': ['tornado'], 'unittest': ['requests', 'pyzmq', 'tornado', 'responses', \ 'testfixtures', 'mock'] }, classifiers=[ 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], )
"""setup.py""" from codecs import open as codecs_open from setuptools import setup with codecs_open('README.rst', 'r', 'utf-8') as f: __README = f.read() with codecs_open('HISTORY.rst', 'r', 'utf-8') as f: __HISTORY = f.read() setup( name='jsonrpcclient', version='2.2.4', description='Send JSON-RPC requests', long_description=__README+'\n\n'+__HISTORY, author='Beau Barker', author_email='[email protected]', url='https://jsonrpcclient.readthedocs.io/', license='MIT', packages=['jsonrpcclient'], package_data={'jsonrpcclient': ['response-schema.json']}, include_package_data=True, install_requires=['future', 'jsonschema'], extras_require={ 'aiohttp': ['aiohttp'], 'requests': ['requests'], 'requests_security': ['requests[security]'], 'tornado': ['tornado'], 'unittest': ['requests', 'pyzmq', 'tornado', 'responses', \ 'testfixtures', 'mock'] 'websockets': ['websockets'], 'zmq': ['pyzmq'], }, classifiers=[ 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], )
3
1
2
mixed
--- a/setup.py +++ b/setup.py @@ -24,5 +24,5 @@ extras_require={ + 'aiohttp': ['aiohttp'], 'requests': ['requests'], 'requests_security': ['requests[security]'], - 'zmq': ['pyzmq'], 'tornado': ['tornado'], @@ -30,2 +30,4 @@ 'testfixtures', 'mock'] + 'websockets': ['websockets'], + 'zmq': ['pyzmq'], },
--- a/setup.py +++ b/setup.py @@ ... @@ extras_require={ + 'aiohttp': ['aiohttp'], 'requests': ['requests'], 'requests_security': ['requests[security]'], - 'zmq': ['pyzmq'], 'tornado': ['tornado'], @@ ... @@ 'testfixtures', 'mock'] + 'websockets': ['websockets'], + 'zmq': ['pyzmq'], },
--- a/setup.py +++ b/setup.py @@ -24,5 +24,5 @@ CON extras_require={ ADD 'aiohttp': ['aiohttp'], CON 'requests': ['requests'], CON 'requests_security': ['requests[security]'], DEL 'zmq': ['pyzmq'], CON 'tornado': ['tornado'], @@ -30,2 +30,4 @@ CON 'testfixtures', 'mock'] ADD 'websockets': ['websockets'], ADD 'zmq': ['pyzmq'], CON },
<<<<<<< SEARCH install_requires=['future', 'jsonschema'], extras_require={ 'requests': ['requests'], 'requests_security': ['requests[security]'], 'zmq': ['pyzmq'], 'tornado': ['tornado'], 'unittest': ['requests', 'pyzmq', 'tornado', 'responses', \ 'testfixtures', 'mock'] }, classifiers=[ ======= install_requires=['future', 'jsonschema'], extras_require={ 'aiohttp': ['aiohttp'], 'requests': ['requests'], 'requests_security': ['requests[security]'], 'tornado': ['tornado'], 'unittest': ['requests', 'pyzmq', 'tornado', 'responses', \ 'testfixtures', 'mock'] 'websockets': ['websockets'], 'zmq': ['pyzmq'], }, classifiers=[ >>>>>>> REPLACE
jimmycuadra/rust-etcd
ffbc9b9a22f0c202223a68a5f6f5453b970610da
build.rs
rust
mit
Fix compilation error due to syntex mismatched types Error: build.rs:27:37: 27:50 error: mismatched types: expected `&mut syntex::Registry`, found `&mut inner::syntex::Registry` (expected struct `syntex::Registry`, found struct `inner::syntex::Registry`) [E0308] build.rs:27 serde_codegen::register(&mut registry); ^~~~~~~~~~~~~ Solution is to use codegen without explicit dependency on syntex, see: https://users.rust-lang.org/t/here-is-how-to-avoid-being-broken-by-syntex-updates/6189?u=dtolnay
#[cfg(not(feature = "serde_macros"))] mod inner { extern crate serde_codegen; extern crate syntex; use std::env; use std::path::Path; const MODULES: &'static[&'static str] = &[ "error", "keys", "stats", "version", ]; pub fn main() { let out_dir = env::var_os("OUT_DIR").unwrap(); for module in MODULES.iter() { let src = format!("src/{}_gen.rs", module); let src_path = Path::new(&src); let dst = format!("{}.rs", module); let dst_path = Path::new(&out_dir).join(&dst); let mut registry = syntex::Registry::new(); serde_codegen::register(&mut registry); registry.expand("", &src_path, &dst_path).unwrap(); } } } #[cfg(feature = "serde_macros")] mod inner { pub fn main() {} } fn main() { inner::main(); }
#[cfg(not(feature = "serde_macros"))] mod inner { extern crate serde_codegen; use std::env; use std::path::Path; const MODULES: &'static[&'static str] = &[ "error", "keys", "stats", "version", ]; pub fn main() { let out_dir = env::var_os("OUT_DIR").unwrap(); for module in MODULES.iter() { let src = format!("src/{}_gen.rs", module); let src_path = Path::new(&src); let dst = format!("{}.rs", module); let dst_path = Path::new(&out_dir).join(&dst); serde_codegen::expand(&src_path, &dst_path).unwrap(); } } } #[cfg(feature = "serde_macros")] mod inner { pub fn main() {} } fn main() { inner::main(); }
1
5
2
mixed
--- a/build.rs +++ b/build.rs @@ -3,3 +3,2 @@ extern crate serde_codegen; - extern crate syntex; @@ -24,6 +23,3 @@ - let mut registry = syntex::Registry::new(); - - serde_codegen::register(&mut registry); - registry.expand("", &src_path, &dst_path).unwrap(); + serde_codegen::expand(&src_path, &dst_path).unwrap(); }
--- a/build.rs +++ b/build.rs @@ ... @@ extern crate serde_codegen; - extern crate syntex; @@ ... @@ - let mut registry = syntex::Registry::new(); - - serde_codegen::register(&mut registry); - registry.expand("", &src_path, &dst_path).unwrap(); + serde_codegen::expand(&src_path, &dst_path).unwrap(); }
--- a/build.rs +++ b/build.rs @@ -3,3 +3,2 @@ CON extern crate serde_codegen; DEL extern crate syntex; CON @@ -24,6 +23,3 @@ CON DEL let mut registry = syntex::Registry::new(); DEL DEL serde_codegen::register(&mut registry); DEL registry.expand("", &src_path, &dst_path).unwrap(); ADD serde_codegen::expand(&src_path, &dst_path).unwrap(); CON }
<<<<<<< SEARCH mod inner { extern crate serde_codegen; extern crate syntex; use std::env; ======= mod inner { extern crate serde_codegen; use std::env; >>>>>>> REPLACE <<<<<<< SEARCH let dst_path = Path::new(&out_dir).join(&dst); let mut registry = syntex::Registry::new(); serde_codegen::register(&mut registry); registry.expand("", &src_path, &dst_path).unwrap(); } } ======= let dst_path = Path::new(&out_dir).join(&dst); serde_codegen::expand(&src_path, &dst_path).unwrap(); } } >>>>>>> REPLACE
tf/pageflow
5d153d46e9b72d80fed1a7c3c04a86ade6546ef9
entry_types/scrolled/package/src/frontend/VideoPlayer/sources.js
javascript
mit
Allow disabling video or dash via browser feature flag Used for debugging. REDMINE-18067
export function sources(videoFile, quality = 'auto') { if (quality === 'auto') { let result = [ { type: 'application/x-mpegURL', src: videoFile.urls['hls-playlist'] }, { type: 'video/mp4', src: videoFile.urls.high } ]; if (videoFile.urls['dash-playlist']) { result = [ { type: 'application/dash+xml', src: videoFile.urls['dash-playlist'] } ].concat(result); } return result; } else { if (!videoFile.urls[quality]) { quality = 'high'; } return [ { type: 'video/mp4', src: videoFile.urls[quality] } ]; } }
import {browser} from 'pageflow/frontend'; browser.feature('dash', () => true); browser.feature('video', () => true); browser.feature('highdef', () => true); export function sources(videoFile, quality = 'auto') { if (typeof window !== 'undefined') { if (!browser.has('video')) { return []; } if (!browser.has('highdef')) { return [ { type: 'video/mp4', src: videoFile.urls.high } ]; } if (!browser.has('dash')) { return [ { type: 'video/mp4', src: videoFile.urls['4k'] || videoFile.urls.fullhd || videoFile.urls.high } ]; } } if (quality === 'auto') { let result = [ { type: 'application/x-mpegURL', src: videoFile.urls['hls-playlist'] }, { type: 'video/mp4', src: videoFile.urls.high } ]; if (videoFile.urls['dash-playlist']) { result = [ { type: 'application/dash+xml', src: videoFile.urls['dash-playlist'] } ].concat(result); } return result; } else { if (!videoFile.urls[quality]) { quality = 'high'; } return [ { type: 'video/mp4', src: videoFile.urls[quality] } ]; } }
30
0
1
add_only
--- a/entry_types/scrolled/package/src/frontend/VideoPlayer/sources.js +++ b/entry_types/scrolled/package/src/frontend/VideoPlayer/sources.js @@ -1,2 +1,32 @@ +import {browser} from 'pageflow/frontend'; + +browser.feature('dash', () => true); +browser.feature('video', () => true); +browser.feature('highdef', () => true); + export function sources(videoFile, quality = 'auto') { + if (typeof window !== 'undefined') { + if (!browser.has('video')) { + return []; + } + + if (!browser.has('highdef')) { + return [ + { + type: 'video/mp4', + src: videoFile.urls.high + } + ]; + } + + if (!browser.has('dash')) { + return [ + { + type: 'video/mp4', + src: videoFile.urls['4k'] || videoFile.urls.fullhd || videoFile.urls.high + } + ]; + } + } + if (quality === 'auto') {
--- a/entry_types/scrolled/package/src/frontend/VideoPlayer/sources.js +++ b/entry_types/scrolled/package/src/frontend/VideoPlayer/sources.js @@ ... @@ +import {browser} from 'pageflow/frontend'; + +browser.feature('dash', () => true); +browser.feature('video', () => true); +browser.feature('highdef', () => true); + export function sources(videoFile, quality = 'auto') { + if (typeof window !== 'undefined') { + if (!browser.has('video')) { + return []; + } + + if (!browser.has('highdef')) { + return [ + { + type: 'video/mp4', + src: videoFile.urls.high + } + ]; + } + + if (!browser.has('dash')) { + return [ + { + type: 'video/mp4', + src: videoFile.urls['4k'] || videoFile.urls.fullhd || videoFile.urls.high + } + ]; + } + } + if (quality === 'auto') {
--- a/entry_types/scrolled/package/src/frontend/VideoPlayer/sources.js +++ b/entry_types/scrolled/package/src/frontend/VideoPlayer/sources.js @@ -1,2 +1,32 @@ ADD import {browser} from 'pageflow/frontend'; ADD ADD browser.feature('dash', () => true); ADD browser.feature('video', () => true); ADD browser.feature('highdef', () => true); ADD CON export function sources(videoFile, quality = 'auto') { ADD if (typeof window !== 'undefined') { ADD if (!browser.has('video')) { ADD return []; ADD } ADD ADD if (!browser.has('highdef')) { ADD return [ ADD { ADD type: 'video/mp4', ADD src: videoFile.urls.high ADD } ADD ]; ADD } ADD ADD if (!browser.has('dash')) { ADD return [ ADD { ADD type: 'video/mp4', ADD src: videoFile.urls['4k'] || videoFile.urls.fullhd || videoFile.urls.high ADD } ADD ]; ADD } ADD } ADD CON if (quality === 'auto') {
<<<<<<< SEARCH export function sources(videoFile, quality = 'auto') { if (quality === 'auto') { let result = [ ======= import {browser} from 'pageflow/frontend'; browser.feature('dash', () => true); browser.feature('video', () => true); browser.feature('highdef', () => true); export function sources(videoFile, quality = 'auto') { if (typeof window !== 'undefined') { if (!browser.has('video')) { return []; } if (!browser.has('highdef')) { return [ { type: 'video/mp4', src: videoFile.urls.high } ]; } if (!browser.has('dash')) { return [ { type: 'video/mp4', src: videoFile.urls['4k'] || videoFile.urls.fullhd || videoFile.urls.high } ]; } } if (quality === 'auto') { let result = [ >>>>>>> REPLACE
StepicOrg/stepik-android
428638304b2da579c8a87191917372153aebbe19
app/src/main/java/org/stepic/droid/ui/activities/FeedbackActivity.kt
kotlin
apache-2.0
Fix background in feedback activity
package org.stepic.droid.ui.activities import android.os.Bundle import android.view.MenuItem import androidx.fragment.app.Fragment import org.stepic.droid.R import org.stepic.droid.base.SingleFragmentActivity import org.stepic.droid.ui.fragments.FeedbackFragment import org.stepic.droid.ui.util.initCenteredToolbar class FeedbackActivity : SingleFragmentActivity() { override fun createFragment(): Fragment = FeedbackFragment.newInstance() override fun getLayoutResId(): Int = R.layout.activity_container_with_bar override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) window.setBackgroundDrawable(null) setUpToolbar() } private fun setUpToolbar() { initCenteredToolbar(R.string.feedback_title, showHomeButton = true, homeIndicator = closeIconDrawableRes) } override fun onOptionsItemSelected(item: MenuItem?): Boolean = when (item?.itemId) { android.R.id.home -> { finish() true } else -> super.onOptionsItemSelected(item) } override fun finish() { super.finish() overridePendingTransition(R.anim.no_transition, R.anim.push_down) } }
package org.stepic.droid.ui.activities import android.os.Bundle import android.view.MenuItem import androidx.fragment.app.Fragment import org.stepic.droid.R import org.stepic.droid.base.SingleFragmentActivity import org.stepic.droid.ui.fragments.FeedbackFragment import org.stepic.droid.ui.util.initCenteredToolbar class FeedbackActivity : SingleFragmentActivity() { override fun createFragment(): Fragment = FeedbackFragment.newInstance() override fun getLayoutResId(): Int = R.layout.activity_container_with_bar override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) setUpToolbar() } private fun setUpToolbar() { initCenteredToolbar(R.string.feedback_title, showHomeButton = true, homeIndicator = closeIconDrawableRes) } override fun onOptionsItemSelected(item: MenuItem?): Boolean = when (item?.itemId) { android.R.id.home -> { finish() true } else -> super.onOptionsItemSelected(item) } override fun finish() { super.finish() overridePendingTransition(R.anim.no_transition, R.anim.push_down) } }
0
1
1
del_only
--- a/app/src/main/java/org/stepic/droid/ui/activities/FeedbackActivity.kt +++ b/app/src/main/java/org/stepic/droid/ui/activities/FeedbackActivity.kt @@ -19,3 +19,2 @@ super.onCreate(savedInstanceState) - window.setBackgroundDrawable(null) setUpToolbar()
--- a/app/src/main/java/org/stepic/droid/ui/activities/FeedbackActivity.kt +++ b/app/src/main/java/org/stepic/droid/ui/activities/FeedbackActivity.kt @@ ... @@ super.onCreate(savedInstanceState) - window.setBackgroundDrawable(null) setUpToolbar()
--- a/app/src/main/java/org/stepic/droid/ui/activities/FeedbackActivity.kt +++ b/app/src/main/java/org/stepic/droid/ui/activities/FeedbackActivity.kt @@ -19,3 +19,2 @@ CON super.onCreate(savedInstanceState) DEL window.setBackgroundDrawable(null) CON setUpToolbar()
<<<<<<< SEARCH override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) window.setBackgroundDrawable(null) setUpToolbar() } ======= override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) setUpToolbar() } >>>>>>> REPLACE
giuseppecuccu/jsvq
2b0a0a058232acc74cc6ad2db359fc84590900ca
svq.java
java
mit
Save buffered image as bmp
// Get JAFFE database from http://www.kasrl.org/jaffe_info.html // Extract pics in folder named "jaffe" // package image_test; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; public class svq { public static void main(String[] args) { // read image BufferedImage input = null; try { input = ImageIO.read(new File("jaffe/KA.AN1.39.tiff.bmp")); } catch (IOException e) { throw new RuntimeException(e); } // to byte array ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { ImageIO.write( input, "bmp", baos ); baos.flush(); } catch (IOException e) { } byte[] bytearray = baos.toByteArray(); // to BufferedImage ByteArrayInputStream bais = new ByteArrayInputStream(bytearray); BufferedImage output = null; try { output = ImageIO.read(bais); bais.close(); } catch (IOException e) { throw new RuntimeException(e); } System.out.println("Done!"); } }
// Get JAFFE database from http://www.kasrl.org/jaffe_info.html // Extract pics in folder named "jaffe" // package image_test; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; public class svq { public static void main(String[] args) { // read image BufferedImage input = null; try { input = ImageIO.read(new File("jaffe/KA.AN1.39.tiff.bmp")); } catch (IOException e) { throw new RuntimeException(e); } // to byte array ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { ImageIO.write( input, "bmp", baos ); baos.flush(); } catch (IOException e) { } byte[] bytearray = baos.toByteArray(); // to BufferedImage ByteArrayInputStream bais = new ByteArrayInputStream(bytearray); BufferedImage output = null; try { output = ImageIO.read(bais); bais.close(); } catch (IOException e) { throw new RuntimeException(e); } // save result try { ImageIO.write(output, "BMP", new File("test.bmp")); } catch (IOException e) { throw new RuntimeException(e); } System.out.println("Done!"); } }
7
0
1
add_only
--- a/svq.java +++ b/svq.java @@ -42,2 +42,9 @@ + // save result + try { + ImageIO.write(output, "BMP", new File("test.bmp")); + } catch (IOException e) { + throw new RuntimeException(e); + } + System.out.println("Done!");
--- a/svq.java +++ b/svq.java @@ ... @@ + // save result + try { + ImageIO.write(output, "BMP", new File("test.bmp")); + } catch (IOException e) { + throw new RuntimeException(e); + } + System.out.println("Done!");
--- a/svq.java +++ b/svq.java @@ -42,2 +42,9 @@ CON ADD // save result ADD try { ADD ImageIO.write(output, "BMP", new File("test.bmp")); ADD } catch (IOException e) { ADD throw new RuntimeException(e); ADD } ADD CON System.out.println("Done!");
<<<<<<< SEARCH } System.out.println("Done!"); } ======= } // save result try { ImageIO.write(output, "BMP", new File("test.bmp")); } catch (IOException e) { throw new RuntimeException(e); } System.out.println("Done!"); } >>>>>>> REPLACE
matthiask/towel
019b4ed21eddaa176c9445f40f91dc0becad7b4a
towel/mt/forms.py
python
bsd-3-clause
towel.mt: Stop evaluating querysets when processing form fields
""" Forms ===== These three form subclasses will automatically add limitation by tenant to all form fields with a ``queryset`` attribute. .. warning:: If you customized the dropdown using ``choices`` you have to limit the choices by the current tenant yourself. """ from django import forms from towel import forms as towel_forms from towel.utils import safe_queryset_and def _process_fields(form, request): for field in form.fields.values(): if getattr(field, 'queryset', None): model = field.queryset.model field.queryset = safe_queryset_and( field.queryset, model.objects.for_access(request.access), ) class Form(forms.Form): def __init__(self, *args, **kwargs): self.request = kwargs.pop('request') super(Form, self).__init__(*args, **kwargs) _process_fields(self, self.request) class ModelForm(forms.ModelForm): def __init__(self, *args, **kwargs): self.request = kwargs.pop('request') super(ModelForm, self).__init__(*args, **kwargs) _process_fields(self, self.request) class SearchForm(towel_forms.SearchForm): def post_init(self, request): self.request = request _process_fields(self, self.request)
""" Forms ===== These three form subclasses will automatically add limitation by tenant to all form fields with a ``queryset`` attribute. .. warning:: If you customized the dropdown using ``choices`` you have to limit the choices by the current tenant yourself. """ from django import forms from towel import forms as towel_forms from towel.utils import safe_queryset_and def _process_fields(form, request): for field in form.fields.values(): if hasattr(field, 'queryset'): model = field.queryset.model field.queryset = safe_queryset_and( field.queryset, model.objects.for_access(request.access), ) class Form(forms.Form): def __init__(self, *args, **kwargs): self.request = kwargs.pop('request') super(Form, self).__init__(*args, **kwargs) _process_fields(self, self.request) class ModelForm(forms.ModelForm): def __init__(self, *args, **kwargs): self.request = kwargs.pop('request') super(ModelForm, self).__init__(*args, **kwargs) _process_fields(self, self.request) class SearchForm(towel_forms.SearchForm): def post_init(self, request): self.request = request _process_fields(self, self.request)
1
1
1
mixed
--- a/towel/mt/forms.py +++ b/towel/mt/forms.py @@ -21,3 +21,3 @@ for field in form.fields.values(): - if getattr(field, 'queryset', None): + if hasattr(field, 'queryset'): model = field.queryset.model
--- a/towel/mt/forms.py +++ b/towel/mt/forms.py @@ ... @@ for field in form.fields.values(): - if getattr(field, 'queryset', None): + if hasattr(field, 'queryset'): model = field.queryset.model
--- a/towel/mt/forms.py +++ b/towel/mt/forms.py @@ -21,3 +21,3 @@ CON for field in form.fields.values(): DEL if getattr(field, 'queryset', None): ADD if hasattr(field, 'queryset'): CON model = field.queryset.model
<<<<<<< SEARCH def _process_fields(form, request): for field in form.fields.values(): if getattr(field, 'queryset', None): model = field.queryset.model ======= def _process_fields(form, request): for field in form.fields.values(): if hasattr(field, 'queryset'): model = field.queryset.model >>>>>>> REPLACE
NordicSemiconductor/Android-nRF-Toolbox
d8f4906afb5ffd35d38ca0a9c2160295dc96e9fb
profile_prx/src/main/java/no/nordicsemi/android/prx/repository/AlarmHandler.kt
kotlin
bsd-3-clause
Change alarm sound for medium and high levels
package no.nordicsemi.android.prx.repository import android.content.Context import android.media.RingtoneManager import android.os.Build import dagger.hilt.android.qualifiers.ApplicationContext import no.nordicsemi.android.prx.data.AlarmLevel import javax.inject.Inject internal class AlarmHandler @Inject constructor( @ApplicationContext private val context: Context ) { private val ringtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)) fun playAlarm(alarmLevel: AlarmLevel) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { ringtone.volume = when (alarmLevel) { AlarmLevel.NONE -> 0f AlarmLevel.MEDIUM -> 0.5f AlarmLevel.HIGH -> 1f } } ringtone.play() } fun pauseAlarm() { if (ringtone.isPlaying) { ringtone.stop() } } }
package no.nordicsemi.android.prx.repository import android.content.Context import android.media.RingtoneManager import android.os.Build import dagger.hilt.android.qualifiers.ApplicationContext import no.nordicsemi.android.prx.data.AlarmLevel import javax.inject.Inject internal class AlarmHandler @Inject constructor( @ApplicationContext private val context: Context ) { private val highLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM)).apply { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { volume = 1f } } private val mediumLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)).apply { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { volume = 0.5f } } fun playAlarm(alarmLevel: AlarmLevel) { val ringtone = when (alarmLevel) { AlarmLevel.NONE -> null AlarmLevel.MEDIUM -> mediumLevelRingtone AlarmLevel.HIGH -> highLevelRingtone } ringtone?.play() } fun pauseAlarm() { highLevelRingtone.takeIf { it.isPlaying }?.stop() mediumLevelRingtone.takeIf { it.isPlaying }?.stop() } }
18
12
2
mixed
--- a/profile_prx/src/main/java/no/nordicsemi/android/prx/repository/AlarmHandler.kt +++ b/profile_prx/src/main/java/no/nordicsemi/android/prx/repository/AlarmHandler.kt @@ -14,14 +14,21 @@ - private val ringtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)) + private val highLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM)).apply { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + volume = 1f + } + } + + private val mediumLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)).apply { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + volume = 0.5f + } + } fun playAlarm(alarmLevel: AlarmLevel) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { - ringtone.volume = when (alarmLevel) { - AlarmLevel.NONE -> 0f - AlarmLevel.MEDIUM -> 0.5f - AlarmLevel.HIGH -> 1f - } + val ringtone = when (alarmLevel) { + AlarmLevel.NONE -> null + AlarmLevel.MEDIUM -> mediumLevelRingtone + AlarmLevel.HIGH -> highLevelRingtone } - - ringtone.play() + ringtone?.play() } @@ -29,5 +36,4 @@ fun pauseAlarm() { - if (ringtone.isPlaying) { - ringtone.stop() - } + highLevelRingtone.takeIf { it.isPlaying }?.stop() + mediumLevelRingtone.takeIf { it.isPlaying }?.stop() }
--- a/profile_prx/src/main/java/no/nordicsemi/android/prx/repository/AlarmHandler.kt +++ b/profile_prx/src/main/java/no/nordicsemi/android/prx/repository/AlarmHandler.kt @@ ... @@ - private val ringtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)) + private val highLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM)).apply { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + volume = 1f + } + } + + private val mediumLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)).apply { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + volume = 0.5f + } + } fun playAlarm(alarmLevel: AlarmLevel) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { - ringtone.volume = when (alarmLevel) { - AlarmLevel.NONE -> 0f - AlarmLevel.MEDIUM -> 0.5f - AlarmLevel.HIGH -> 1f - } + val ringtone = when (alarmLevel) { + AlarmLevel.NONE -> null + AlarmLevel.MEDIUM -> mediumLevelRingtone + AlarmLevel.HIGH -> highLevelRingtone } - - ringtone.play() + ringtone?.play() } @@ ... @@ fun pauseAlarm() { - if (ringtone.isPlaying) { - ringtone.stop() - } + highLevelRingtone.takeIf { it.isPlaying }?.stop() + mediumLevelRingtone.takeIf { it.isPlaying }?.stop() }
--- a/profile_prx/src/main/java/no/nordicsemi/android/prx/repository/AlarmHandler.kt +++ b/profile_prx/src/main/java/no/nordicsemi/android/prx/repository/AlarmHandler.kt @@ -14,14 +14,21 @@ CON DEL private val ringtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)) ADD private val highLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM)).apply { ADD if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { ADD volume = 1f ADD } ADD } ADD ADD private val mediumLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)).apply { ADD if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { ADD volume = 0.5f ADD } ADD } CON CON fun playAlarm(alarmLevel: AlarmLevel) { DEL if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { DEL ringtone.volume = when (alarmLevel) { DEL AlarmLevel.NONE -> 0f DEL AlarmLevel.MEDIUM -> 0.5f DEL AlarmLevel.HIGH -> 1f DEL } ADD val ringtone = when (alarmLevel) { ADD AlarmLevel.NONE -> null ADD AlarmLevel.MEDIUM -> mediumLevelRingtone ADD AlarmLevel.HIGH -> highLevelRingtone CON } DEL DEL ringtone.play() ADD ringtone?.play() CON } @@ -29,5 +36,4 @@ CON fun pauseAlarm() { DEL if (ringtone.isPlaying) { DEL ringtone.stop() DEL } ADD highLevelRingtone.takeIf { it.isPlaying }?.stop() ADD mediumLevelRingtone.takeIf { it.isPlaying }?.stop() CON }
<<<<<<< SEARCH ) { private val ringtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)) fun playAlarm(alarmLevel: AlarmLevel) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { ringtone.volume = when (alarmLevel) { AlarmLevel.NONE -> 0f AlarmLevel.MEDIUM -> 0.5f AlarmLevel.HIGH -> 1f } } ringtone.play() } fun pauseAlarm() { if (ringtone.isPlaying) { ringtone.stop() } } } ======= ) { private val highLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM)).apply { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { volume = 1f } } private val mediumLevelRingtone = RingtoneManager.getRingtone(context, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE)).apply { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { volume = 0.5f } } fun playAlarm(alarmLevel: AlarmLevel) { val ringtone = when (alarmLevel) { AlarmLevel.NONE -> null AlarmLevel.MEDIUM -> mediumLevelRingtone AlarmLevel.HIGH -> highLevelRingtone } ringtone?.play() } fun pauseAlarm() { highLevelRingtone.takeIf { it.isPlaying }?.stop() mediumLevelRingtone.takeIf { it.isPlaying }?.stop() } } >>>>>>> REPLACE
kotlinx/kotlinx.html
546089b6192f1e1805c335d53d45a9b55c3af2bf
js/src/test/kotlin/trees.kt
kotlin
apache-2.0
Add test for event handling
package html4k.tests import html4k.dom.append import html4k.js.div import html4k.p import org.w3c.dom.asList import kotlin.browser.document import kotlin.test.assertEquals import kotlin.test.assertTrue import org.junit.Test as test class DomTreeImplTest { test fun simpleTree() { val node = document.body!!.append.div { p { +"test" } } assertEquals("DIV", node.tagName) assertEquals(1, node.childNodes.length) assertEquals("P", node.children[0]?.tagName) assertTrue(document.body!!.children.length > 0) assertEquals(node, document.body!!.children.asList().last()) } }
package html4k.tests import html4k.dom.append import html4k.js.div import html4k.js.onClickFunction import html4k.p import org.w3c.dom.events.Event import org.w3c.dom.HTMLDivElement import org.w3c.dom.HTMLElement import org.w3c.dom.asList import kotlin.dom.asList import kotlin.browser.document import kotlin.test.assertEquals import kotlin.test.assertNotNull import kotlin.test.assertTrue import org.junit.Test as test class DomTreeImplTest { test fun simpleTree() { val node = document.body!!.append.div { p { +"test" } } assertEquals("DIV", node.tagName) assertEquals(1, node.childNodes.length) assertEquals("P", node.children[0]?.tagName) assertTrue(document.body!!.children.length > 0) assertEquals(node, document.body!!.children.asList().last()) } test fun appendSingleNode() { val myDiv: HTMLDivElement = document.body!!.append.div { p { +"test" } } assertEquals("DIV", myDiv.tagName) assertEquals(document.body, myDiv.parentNode) assertEquals("<div><p>test</p></div>", myDiv.outerHTML.replace("\\s+".toRegex(), "")) } test fun appendNodeWithEventHandler() { var clicked = false document.body!!.append.div { onClickFunction = { clicked = true } } document.getElementsByTagName("div").asList().forEach { if (it is HTMLElement) { val clickHandler = it.onclick if (clickHandler != null) { clickHandler(uninitialized()) } } } assertTrue(clicked) } private fun <T> uninitialized(): T = null as T }
41
0
2
add_only
--- a/js/src/test/kotlin/trees.kt +++ b/js/src/test/kotlin/trees.kt @@ -4,6 +4,12 @@ import html4k.js.div +import html4k.js.onClickFunction import html4k.p +import org.w3c.dom.events.Event +import org.w3c.dom.HTMLDivElement +import org.w3c.dom.HTMLElement import org.w3c.dom.asList +import kotlin.dom.asList import kotlin.browser.document import kotlin.test.assertEquals +import kotlin.test.assertNotNull import kotlin.test.assertTrue @@ -26,2 +32,37 @@ } + + test fun appendSingleNode() { + val myDiv: HTMLDivElement = document.body!!.append.div { + p { + +"test" + } + } + + assertEquals("DIV", myDiv.tagName) + assertEquals(document.body, myDiv.parentNode) + assertEquals("<div><p>test</p></div>", myDiv.outerHTML.replace("\\s+".toRegex(), "")) + } + + test fun appendNodeWithEventHandler() { + var clicked = false + + document.body!!.append.div { + onClickFunction = { + clicked = true + } + } + + document.getElementsByTagName("div").asList().forEach { + if (it is HTMLElement) { + val clickHandler = it.onclick + if (clickHandler != null) { + clickHandler(uninitialized()) + } + } + } + + assertTrue(clicked) + } + + private fun <T> uninitialized(): T = null as T }
--- a/js/src/test/kotlin/trees.kt +++ b/js/src/test/kotlin/trees.kt @@ ... @@ import html4k.js.div +import html4k.js.onClickFunction import html4k.p +import org.w3c.dom.events.Event +import org.w3c.dom.HTMLDivElement +import org.w3c.dom.HTMLElement import org.w3c.dom.asList +import kotlin.dom.asList import kotlin.browser.document import kotlin.test.assertEquals +import kotlin.test.assertNotNull import kotlin.test.assertTrue @@ ... @@ } + + test fun appendSingleNode() { + val myDiv: HTMLDivElement = document.body!!.append.div { + p { + +"test" + } + } + + assertEquals("DIV", myDiv.tagName) + assertEquals(document.body, myDiv.parentNode) + assertEquals("<div><p>test</p></div>", myDiv.outerHTML.replace("\\s+".toRegex(), "")) + } + + test fun appendNodeWithEventHandler() { + var clicked = false + + document.body!!.append.div { + onClickFunction = { + clicked = true + } + } + + document.getElementsByTagName("div").asList().forEach { + if (it is HTMLElement) { + val clickHandler = it.onclick + if (clickHandler != null) { + clickHandler(uninitialized()) + } + } + } + + assertTrue(clicked) + } + + private fun <T> uninitialized(): T = null as T }
--- a/js/src/test/kotlin/trees.kt +++ b/js/src/test/kotlin/trees.kt @@ -4,6 +4,12 @@ CON import html4k.js.div ADD import html4k.js.onClickFunction CON import html4k.p ADD import org.w3c.dom.events.Event ADD import org.w3c.dom.HTMLDivElement ADD import org.w3c.dom.HTMLElement CON import org.w3c.dom.asList ADD import kotlin.dom.asList CON import kotlin.browser.document CON import kotlin.test.assertEquals ADD import kotlin.test.assertNotNull CON import kotlin.test.assertTrue @@ -26,2 +32,37 @@ CON } ADD ADD test fun appendSingleNode() { ADD val myDiv: HTMLDivElement = document.body!!.append.div { ADD p { ADD +"test" ADD } ADD } ADD ADD assertEquals("DIV", myDiv.tagName) ADD assertEquals(document.body, myDiv.parentNode) ADD assertEquals("<div><p>test</p></div>", myDiv.outerHTML.replace("\\s+".toRegex(), "")) ADD } ADD ADD test fun appendNodeWithEventHandler() { ADD var clicked = false ADD ADD document.body!!.append.div { ADD onClickFunction = { ADD clicked = true ADD } ADD } ADD ADD document.getElementsByTagName("div").asList().forEach { ADD if (it is HTMLElement) { ADD val clickHandler = it.onclick ADD if (clickHandler != null) { ADD clickHandler(uninitialized()) ADD } ADD } ADD } ADD ADD assertTrue(clicked) ADD } ADD ADD private fun <T> uninitialized(): T = null as T CON }
<<<<<<< SEARCH import html4k.dom.append import html4k.js.div import html4k.p import org.w3c.dom.asList import kotlin.browser.document import kotlin.test.assertEquals import kotlin.test.assertTrue import org.junit.Test as test ======= import html4k.dom.append import html4k.js.div import html4k.js.onClickFunction import html4k.p import org.w3c.dom.events.Event import org.w3c.dom.HTMLDivElement import org.w3c.dom.HTMLElement import org.w3c.dom.asList import kotlin.dom.asList import kotlin.browser.document import kotlin.test.assertEquals import kotlin.test.assertNotNull import kotlin.test.assertTrue import org.junit.Test as test >>>>>>> REPLACE <<<<<<< SEARCH assertEquals(node, document.body!!.children.asList().last()) } } ======= assertEquals(node, document.body!!.children.asList().last()) } test fun appendSingleNode() { val myDiv: HTMLDivElement = document.body!!.append.div { p { +"test" } } assertEquals("DIV", myDiv.tagName) assertEquals(document.body, myDiv.parentNode) assertEquals("<div><p>test</p></div>", myDiv.outerHTML.replace("\\s+".toRegex(), "")) } test fun appendNodeWithEventHandler() { var clicked = false document.body!!.append.div { onClickFunction = { clicked = true } } document.getElementsByTagName("div").asList().forEach { if (it is HTMLElement) { val clickHandler = it.onclick if (clickHandler != null) { clickHandler(uninitialized()) } } } assertTrue(clicked) } private fun <T> uninitialized(): T = null as T } >>>>>>> REPLACE
GNOME/librsvg
b799710f51363e89291ba81e925c1c217036e1c1
tests/src/legacy_sizing.rs
rust
lgpl-2.1
Test for no intrinsic size at all Part-of: <https://gitlab.gnome.org/GNOME/librsvg/-/merge_requests/595>
use cairo; use librsvg::{rsvg_convert_only::LegacySize, CairoRenderer}; use crate::utils::load_svg; #[test] fn just_viewbox_uses_viewbox_size() { let svg = load_svg( br#"<?xml version="1.0" encoding="UTF-8"?> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 200"/> "#, ) .unwrap(); assert_eq!( CairoRenderer::new(&svg).legacy_layer_geometry(None).unwrap(), (cairo::Rectangle { x: 0.0, y: 0.0, width: 100.0, height: 200.0, }, cairo::Rectangle { x: 0.0, y: 0.0, width: 100.0, height: 200.0, }) ); }
use cairo; use librsvg::{rsvg_convert_only::LegacySize, CairoRenderer}; use crate::utils::load_svg; #[test] fn just_viewbox_uses_viewbox_size() { let svg = load_svg( br#"<?xml version="1.0" encoding="UTF-8"?> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 200"/> "#, ) .unwrap(); assert_eq!( CairoRenderer::new(&svg).legacy_layer_geometry(None).unwrap(), (cairo::Rectangle { x: 0.0, y: 0.0, width: 100.0, height: 200.0, }, cairo::Rectangle { x: 0.0, y: 0.0, width: 100.0, height: 200.0, }) ); } #[test] fn no_intrinsic_size_uses_element_geometries() { let svg = load_svg( br#"<?xml version="1.0" encoding="UTF-8"?> <svg xmlns="http://www.w3.org/2000/svg"> <rect x="10" y="20" width="30" height="40" fill="black"/> </svg> "#, ) .unwrap(); assert_eq!( CairoRenderer::new(&svg).legacy_layer_geometry(None).unwrap(), (cairo::Rectangle { x: 10.0, y: 20.0, width: 30.0, height: 40.0, }, cairo::Rectangle { x: 10.0, y: 20.0, width: 30.0, height: 40.0, }) ); }
28
0
1
add_only
--- a/tests/src/legacy_sizing.rs +++ b/tests/src/legacy_sizing.rs @@ -31 +31,29 @@ } + +#[test] +fn no_intrinsic_size_uses_element_geometries() { + let svg = load_svg( + br#"<?xml version="1.0" encoding="UTF-8"?> +<svg xmlns="http://www.w3.org/2000/svg"> + <rect x="10" y="20" width="30" height="40" fill="black"/> +</svg> +"#, + ) + .unwrap(); + + assert_eq!( + CairoRenderer::new(&svg).legacy_layer_geometry(None).unwrap(), + (cairo::Rectangle { + x: 10.0, + y: 20.0, + width: 30.0, + height: 40.0, + }, + cairo::Rectangle { + x: 10.0, + y: 20.0, + width: 30.0, + height: 40.0, + }) + ); +}
--- a/tests/src/legacy_sizing.rs +++ b/tests/src/legacy_sizing.rs @@ ... @@ } + +#[test] +fn no_intrinsic_size_uses_element_geometries() { + let svg = load_svg( + br#"<?xml version="1.0" encoding="UTF-8"?> +<svg xmlns="http://www.w3.org/2000/svg"> + <rect x="10" y="20" width="30" height="40" fill="black"/> +</svg> +"#, + ) + .unwrap(); + + assert_eq!( + CairoRenderer::new(&svg).legacy_layer_geometry(None).unwrap(), + (cairo::Rectangle { + x: 10.0, + y: 20.0, + width: 30.0, + height: 40.0, + }, + cairo::Rectangle { + x: 10.0, + y: 20.0, + width: 30.0, + height: 40.0, + }) + ); +}
--- a/tests/src/legacy_sizing.rs +++ b/tests/src/legacy_sizing.rs @@ -31 +31,29 @@ CON } ADD ADD #[test] ADD fn no_intrinsic_size_uses_element_geometries() { ADD let svg = load_svg( ADD br#"<?xml version="1.0" encoding="UTF-8"?> ADD <svg xmlns="http://www.w3.org/2000/svg"> ADD <rect x="10" y="20" width="30" height="40" fill="black"/> ADD </svg> ADD "#, ADD ) ADD .unwrap(); ADD ADD assert_eq!( ADD CairoRenderer::new(&svg).legacy_layer_geometry(None).unwrap(), ADD (cairo::Rectangle { ADD x: 10.0, ADD y: 20.0, ADD width: 30.0, ADD height: 40.0, ADD }, ADD cairo::Rectangle { ADD x: 10.0, ADD y: 20.0, ADD width: 30.0, ADD height: 40.0, ADD }) ADD ); ADD }
<<<<<<< SEARCH ); } ======= ); } #[test] fn no_intrinsic_size_uses_element_geometries() { let svg = load_svg( br#"<?xml version="1.0" encoding="UTF-8"?> <svg xmlns="http://www.w3.org/2000/svg"> <rect x="10" y="20" width="30" height="40" fill="black"/> </svg> "#, ) .unwrap(); assert_eq!( CairoRenderer::new(&svg).legacy_layer_geometry(None).unwrap(), (cairo::Rectangle { x: 10.0, y: 20.0, width: 30.0, height: 40.0, }, cairo::Rectangle { x: 10.0, y: 20.0, width: 30.0, height: 40.0, }) ); } >>>>>>> REPLACE
ArkeologeN/node-obligatory
01c0ecc8e41dcd5e73a114fb02c8eb11d1817334
index.js
javascript
mit
Replace native w/ _.isObject function.
var klass = require('klass') , _ = require('lodash') , obligator = klass({ initialize: function() { this._params = {}; this._collection = {}; } }) .methods({ setCollection: function(collection) { if ( !collection ) throw Error("collection is empty or missing!"); var parameters = Object.getOwnPropertyNames(this) , i; for(i in parameters){ if(typeof(this[parameters[i]]) != "object"){ this._params[parameters[i]] = this[parameters[i]]; delete this[parameters[i]]; } } this._collection = _.isString(collection) ? JSON.parse(collection) : collection; }, validate: function(){ var params = this._params , missingParams = [] , property = ""; for(property in params) { if( params[property] != false && (_.isUndefined(this._collection[property]))) { missingParams.push(property); } } if( missingParams.length > 0) throw Error("Missing Parameter(s): " + missingParams); return true; }, reset: function() { this._collection = {}; this._params = {}; }, count: function() { var count = 0; for (var p in this._params) ++count; return count; }, }); exports.newFactoryInstance = function() { return new obligator; }
var klass = require('klass') , _ = require('lodash') , obligator = klass({ initialize: function() { this._params = {}; this._collection = {}; } }) .methods({ setCollection: function(collection) { if ( !collection ) throw Error("collection is empty or missing!"); var parameters = Object.getOwnPropertyNames(this) , i; for(i in parameters){ if(!_.isObject(this[parameters[i]])){ this._params[parameters[i]] = this[parameters[i]]; delete this[parameters[i]]; } } this._collection = _.isString(collection) ? JSON.parse(collection) : collection; }, validate: function(){ var params = this._params , missingParams = [] , property = ""; for(property in params) { if( params[property] != false && (_.isUndefined(this._collection[property]))) { missingParams.push(property); } } if( missingParams.length > 0) throw Error("Missing Parameter(s): " + missingParams); return true; }, reset: function() { this._collection = {}; this._params = {}; }, count: function() { var count = 0; for (var p in this._params) ++count; return count; }, }); exports.newFactoryInstance = function() { return new obligator; }
1
1
1
mixed
--- a/index.js +++ b/index.js @@ -18,3 +18,3 @@ for(i in parameters){ - if(typeof(this[parameters[i]]) != "object"){ + if(!_.isObject(this[parameters[i]])){ this._params[parameters[i]] = this[parameters[i]];
--- a/index.js +++ b/index.js @@ ... @@ for(i in parameters){ - if(typeof(this[parameters[i]]) != "object"){ + if(!_.isObject(this[parameters[i]])){ this._params[parameters[i]] = this[parameters[i]];
--- a/index.js +++ b/index.js @@ -18,3 +18,3 @@ CON for(i in parameters){ DEL if(typeof(this[parameters[i]]) != "object"){ ADD if(!_.isObject(this[parameters[i]])){ CON this._params[parameters[i]] = this[parameters[i]];
<<<<<<< SEARCH for(i in parameters){ if(typeof(this[parameters[i]]) != "object"){ this._params[parameters[i]] = this[parameters[i]]; delete this[parameters[i]]; ======= for(i in parameters){ if(!_.isObject(this[parameters[i]])){ this._params[parameters[i]] = this[parameters[i]]; delete this[parameters[i]]; >>>>>>> REPLACE
bloodybear/yona
ecd4c0f97afc45f07a381d9d161269435fe02a80
app/models/UserAction.java
java
apache-2.0
Add missed serial version UID
package models; import models.enumeration.ResourceType; import play.db.ebean.Model; import javax.persistence.*; import java.util.List; @MappedSuperclass abstract public class UserAction extends Model { @Id public Long id; @ManyToOne public User user; @Enumerated(EnumType.STRING) public models.enumeration.ResourceType resourceType; public String resourceId; public static <T extends UserAction> List<T> findBy(Finder<Long, T> finder, ResourceType resourceType, String resourceId) { return finder.where() .eq("resourceType", resourceType) .eq("resourceId", resourceId).findList(); } public static <T extends UserAction> T findBy(Finder<Long, T> finder, User subject, ResourceType resourceType, String resourceId) { return finder.where() .eq("user.id", subject.id) .eq("resourceType", resourceType) .eq("resourceId", resourceId).findUnique(); } public static <T extends UserAction> List<T> findBy(Finder<Long, T> finder, User subject, ResourceType resourceType) { return finder.where() .eq("user.id", subject.id) .eq("resourceType", resourceType).findList(); } }
package models; import models.enumeration.ResourceType; import play.db.ebean.Model; import javax.persistence.*; import java.util.List; @MappedSuperclass abstract public class UserAction extends Model { private static final long serialVersionUID = 7150871138735757127L; @Id public Long id; @ManyToOne public User user; @Enumerated(EnumType.STRING) public models.enumeration.ResourceType resourceType; public String resourceId; public static <T extends UserAction> List<T> findBy(Finder<Long, T> finder, ResourceType resourceType, String resourceId) { return finder.where() .eq("resourceType", resourceType) .eq("resourceId", resourceId).findList(); } public static <T extends UserAction> T findBy(Finder<Long, T> finder, User subject, ResourceType resourceType, String resourceId) { return finder.where() .eq("user.id", subject.id) .eq("resourceType", resourceType) .eq("resourceId", resourceId).findUnique(); } public static <T extends UserAction> List<T> findBy(Finder<Long, T> finder, User subject, ResourceType resourceType) { return finder.where() .eq("user.id", subject.id) .eq("resourceType", resourceType).findList(); } }
1
0
1
add_only
--- a/app/models/UserAction.java +++ b/app/models/UserAction.java @@ -10,2 +10,3 @@ abstract public class UserAction extends Model { + private static final long serialVersionUID = 7150871138735757127L; @Id
--- a/app/models/UserAction.java +++ b/app/models/UserAction.java @@ ... @@ abstract public class UserAction extends Model { + private static final long serialVersionUID = 7150871138735757127L; @Id
--- a/app/models/UserAction.java +++ b/app/models/UserAction.java @@ -10,2 +10,3 @@ CON abstract public class UserAction extends Model { ADD private static final long serialVersionUID = 7150871138735757127L; CON @Id
<<<<<<< SEARCH @MappedSuperclass abstract public class UserAction extends Model { @Id public Long id; ======= @MappedSuperclass abstract public class UserAction extends Model { private static final long serialVersionUID = 7150871138735757127L; @Id public Long id; >>>>>>> REPLACE
evestera/json_sample
e8bc8eb14f51d110ed842f4e308654b84ba8c484
json_typegen_cli/src/main.rs
rust
apache-2.0
CLI: Reimplement support for non-macro input
extern crate json_typegen_shared; extern crate clap; use json_typegen_shared::codegen_from_macro; use clap::{Arg, App}; fn main() { let matches = App::new("JSON code generation CLI") .version("0.1.0") .about("Generate Rust types from JSON samples") .arg(Arg::with_name("input") .help("The input macro to generate types from.") .takes_value(true) .required(true)) .get_matches(); let source = matches.value_of("input").unwrap(); let code = codegen_from_macro(&source).unwrap(); print!("{}", code); }
extern crate json_typegen_shared; extern crate clap; use json_typegen_shared::{codegen, codegen_from_macro, Options, infer_source_type}; use clap::{Arg, App}; use std::io::{self, Read, Write}; use std::fs::OpenOptions; fn main() { let matches = App::new("JSON code generation CLI") .version("0.1.0") .about("Generate Rust types from JSON samples") .arg( Arg::with_name("input") .help("The input to generate types from. A sample, file, URL, or macro.") .takes_value(true) .required(true), ) .arg( Arg::with_name("name") .short("n") .long("name") .help("Name for the root generated type. Default: Root.") .takes_value(true), ) .arg( Arg::with_name("output") .short("o") .long("output") .help("What file to write the output to. Default: standard output.") .takes_value(true), ) .get_matches(); let source = matches.value_of("input").unwrap(); let input = if source == "-" { let mut buffer = String::new(); io::stdin().read_to_string(&mut buffer).unwrap(); buffer } else { source.to_string() }; let code = if input.trim().starts_with("json_typegen") { codegen_from_macro(&input) } else { let name = matches.value_of("name").unwrap_or("Root"); codegen(&name, &infer_source_type(&input), Options::default()) }; if let Some(filename) = matches.value_of("output") { let mut file = OpenOptions::new() .read(true) .write(true) .create(true) .truncate(true) .open(filename) .unwrap(); file.write_all(code.unwrap().as_bytes()).unwrap(); } else { print!("{}", code.unwrap()); } }
55
10
2
mixed
--- a/json_typegen_cli/src/main.rs +++ b/json_typegen_cli/src/main.rs @@ -3,4 +3,6 @@ -use json_typegen_shared::codegen_from_macro; +use json_typegen_shared::{codegen, codegen_from_macro, Options, infer_source_type}; use clap::{Arg, App}; +use std::io::{self, Read, Write}; +use std::fs::OpenOptions; @@ -8,13 +10,56 @@ let matches = App::new("JSON code generation CLI") - .version("0.1.0") - .about("Generate Rust types from JSON samples") - .arg(Arg::with_name("input") - .help("The input macro to generate types from.") - .takes_value(true) - .required(true)) - .get_matches(); + .version("0.1.0") + .about("Generate Rust types from JSON samples") + .arg( + Arg::with_name("input") + .help("The input to generate types from. A sample, file, URL, or macro.") + .takes_value(true) + .required(true), + ) + .arg( + Arg::with_name("name") + .short("n") + .long("name") + .help("Name for the root generated type. Default: Root.") + .takes_value(true), + ) + .arg( + Arg::with_name("output") + .short("o") + .long("output") + .help("What file to write the output to. Default: standard output.") + .takes_value(true), + ) + .get_matches(); let source = matches.value_of("input").unwrap(); - let code = codegen_from_macro(&source).unwrap(); - print!("{}", code); + + let input = if source == "-" { + let mut buffer = String::new(); + io::stdin().read_to_string(&mut buffer).unwrap(); + buffer + } else { + source.to_string() + }; + + let code = if input.trim().starts_with("json_typegen") { + codegen_from_macro(&input) + } else { + let name = matches.value_of("name").unwrap_or("Root"); + codegen(&name, &infer_source_type(&input), Options::default()) + }; + + if let Some(filename) = matches.value_of("output") { + let mut file = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .truncate(true) + .open(filename) + .unwrap(); + + file.write_all(code.unwrap().as_bytes()).unwrap(); + } else { + print!("{}", code.unwrap()); + } }
--- a/json_typegen_cli/src/main.rs +++ b/json_typegen_cli/src/main.rs @@ ... @@ -use json_typegen_shared::codegen_from_macro; +use json_typegen_shared::{codegen, codegen_from_macro, Options, infer_source_type}; use clap::{Arg, App}; +use std::io::{self, Read, Write}; +use std::fs::OpenOptions; @@ ... @@ let matches = App::new("JSON code generation CLI") - .version("0.1.0") - .about("Generate Rust types from JSON samples") - .arg(Arg::with_name("input") - .help("The input macro to generate types from.") - .takes_value(true) - .required(true)) - .get_matches(); + .version("0.1.0") + .about("Generate Rust types from JSON samples") + .arg( + Arg::with_name("input") + .help("The input to generate types from. A sample, file, URL, or macro.") + .takes_value(true) + .required(true), + ) + .arg( + Arg::with_name("name") + .short("n") + .long("name") + .help("Name for the root generated type. Default: Root.") + .takes_value(true), + ) + .arg( + Arg::with_name("output") + .short("o") + .long("output") + .help("What file to write the output to. Default: standard output.") + .takes_value(true), + ) + .get_matches(); let source = matches.value_of("input").unwrap(); - let code = codegen_from_macro(&source).unwrap(); - print!("{}", code); + + let input = if source == "-" { + let mut buffer = String::new(); + io::stdin().read_to_string(&mut buffer).unwrap(); + buffer + } else { + source.to_string() + }; + + let code = if input.trim().starts_with("json_typegen") { + codegen_from_macro(&input) + } else { + let name = matches.value_of("name").unwrap_or("Root"); + codegen(&name, &infer_source_type(&input), Options::default()) + }; + + if let Some(filename) = matches.value_of("output") { + let mut file = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .truncate(true) + .open(filename) + .unwrap(); + + file.write_all(code.unwrap().as_bytes()).unwrap(); + } else { + print!("{}", code.unwrap()); + } }
--- a/json_typegen_cli/src/main.rs +++ b/json_typegen_cli/src/main.rs @@ -3,4 +3,6 @@ CON DEL use json_typegen_shared::codegen_from_macro; ADD use json_typegen_shared::{codegen, codegen_from_macro, Options, infer_source_type}; CON use clap::{Arg, App}; ADD use std::io::{self, Read, Write}; ADD use std::fs::OpenOptions; CON @@ -8,13 +10,56 @@ CON let matches = App::new("JSON code generation CLI") DEL .version("0.1.0") DEL .about("Generate Rust types from JSON samples") DEL .arg(Arg::with_name("input") DEL .help("The input macro to generate types from.") DEL .takes_value(true) DEL .required(true)) DEL .get_matches(); ADD .version("0.1.0") ADD .about("Generate Rust types from JSON samples") ADD .arg( ADD Arg::with_name("input") ADD .help("The input to generate types from. A sample, file, URL, or macro.") ADD .takes_value(true) ADD .required(true), ADD ) ADD .arg( ADD Arg::with_name("name") ADD .short("n") ADD .long("name") ADD .help("Name for the root generated type. Default: Root.") ADD .takes_value(true), ADD ) ADD .arg( ADD Arg::with_name("output") ADD .short("o") ADD .long("output") ADD .help("What file to write the output to. Default: standard output.") ADD .takes_value(true), ADD ) ADD .get_matches(); CON CON let source = matches.value_of("input").unwrap(); DEL let code = codegen_from_macro(&source).unwrap(); DEL print!("{}", code); ADD ADD let input = if source == "-" { ADD let mut buffer = String::new(); ADD io::stdin().read_to_string(&mut buffer).unwrap(); ADD buffer ADD } else { ADD source.to_string() ADD }; ADD ADD let code = if input.trim().starts_with("json_typegen") { ADD codegen_from_macro(&input) ADD } else { ADD let name = matches.value_of("name").unwrap_or("Root"); ADD codegen(&name, &infer_source_type(&input), Options::default()) ADD }; ADD ADD if let Some(filename) = matches.value_of("output") { ADD let mut file = OpenOptions::new() ADD .read(true) ADD .write(true) ADD .create(true) ADD .truncate(true) ADD .open(filename) ADD .unwrap(); ADD ADD file.write_all(code.unwrap().as_bytes()).unwrap(); ADD } else { ADD print!("{}", code.unwrap()); ADD } CON }
<<<<<<< SEARCH extern crate clap; use json_typegen_shared::codegen_from_macro; use clap::{Arg, App}; fn main() { let matches = App::new("JSON code generation CLI") .version("0.1.0") .about("Generate Rust types from JSON samples") .arg(Arg::with_name("input") .help("The input macro to generate types from.") .takes_value(true) .required(true)) .get_matches(); let source = matches.value_of("input").unwrap(); let code = codegen_from_macro(&source).unwrap(); print!("{}", code); } ======= extern crate clap; use json_typegen_shared::{codegen, codegen_from_macro, Options, infer_source_type}; use clap::{Arg, App}; use std::io::{self, Read, Write}; use std::fs::OpenOptions; fn main() { let matches = App::new("JSON code generation CLI") .version("0.1.0") .about("Generate Rust types from JSON samples") .arg( Arg::with_name("input") .help("The input to generate types from. A sample, file, URL, or macro.") .takes_value(true) .required(true), ) .arg( Arg::with_name("name") .short("n") .long("name") .help("Name for the root generated type. Default: Root.") .takes_value(true), ) .arg( Arg::with_name("output") .short("o") .long("output") .help("What file to write the output to. Default: standard output.") .takes_value(true), ) .get_matches(); let source = matches.value_of("input").unwrap(); let input = if source == "-" { let mut buffer = String::new(); io::stdin().read_to_string(&mut buffer).unwrap(); buffer } else { source.to_string() }; let code = if input.trim().starts_with("json_typegen") { codegen_from_macro(&input) } else { let name = matches.value_of("name").unwrap_or("Root"); codegen(&name, &infer_source_type(&input), Options::default()) }; if let Some(filename) = matches.value_of("output") { let mut file = OpenOptions::new() .read(true) .write(true) .create(true) .truncate(true) .open(filename) .unwrap(); file.write_all(code.unwrap().as_bytes()).unwrap(); } else { print!("{}", code.unwrap()); } } >>>>>>> REPLACE
ecomfe/rebas
3d22a51134963c3f188edeacb937d2a6f17a29e5
lib/env.js
javascript
mit
Fix process.nextTick, try to save context
/**
 * @file env
 * @author treelite([email protected])
 */

/**
 * Fix the global async functions
 * so that the request context can be restored in async callbacks
 *
 * @inner
 * @param {Object} app server
 */
function fixGlobalMethods(app) {
    var methods = ['setTimeout', 'setInterval', 'setImmediate'];

    methods.forEach(function (name) {
        var method = global[name];
        global[name] = function (callback) {
            var args = Array.prototype.slice.call(arguments, 1);
            var id = app.stashContext();
            args.unshift(function () {
                app.revertContext(id);
                callback();
            });
            method.apply(null, args);
        };
    });
}

/**
 * Runtime environment setup
 *
 * @public
 * @param {Object} app server
 */
module.exports = function (app) {
    // Fix global async functions
    fixGlobalMethods(app);
};
/**
 * @file env
 * @author treelite([email protected])
 */

/**
 * Fix async functions
 * so that the request context can be restored in async callbacks
 *
 * @inner
 * @param {Object} app server
 */
function fixAsyncMethods(app) {

    /**
     * Wrap an async function
     *
     * @param {Object} obj module
     * @param {string} name method name
     */
    function wrap(obj, name) {
        var method = obj[name];
        obj[name] = function (callback) {
            var args = Array.prototype.slice.call(arguments, 1);
            var id = app.stashContext();
            args.unshift(function () {
                app.revertContext(id);
                callback();
            });
            method.apply(null, args);
        };
    }

    var methods = ['setTimeout', 'setInterval', 'setImmediate'];
    methods.forEach(function (name) {
        wrap(global, name);
    });

    wrap(process, 'nextTick');
}

/**
 * Runtime environment setup
 *
 * @public
 * @param {Object} app server
 */
module.exports = function (app) {
    // Fix async functions
    fixAsyncMethods(app);
};
20
8
4
mixed
--- a/lib/env.js
+++ b/lib/env.js
@@ -6,3 +6,3 @@
 /**
- * Fix the global async functions
+ * Fix async functions
  * so that the request context can be restored in async callbacks
@@ -12,8 +12,13 @@
  */
-function fixGlobalMethods(app) {
-    var methods = ['setTimeout', 'setInterval', 'setImmediate'];
+function fixAsyncMethods(app) {
 
-    methods.forEach(function (name) {
-        var method = global[name];
-        global[name] = function (callback) {
+    /**
+     * Wrap an async function
+     *
+     * @param {Object} obj module
+     * @param {string} name method name
+     */
+    function wrap(obj, name) {
+        var method = obj[name];
+        obj[name] = function (callback) {
             var args = Array.prototype.slice.call(arguments, 1);
@@ -26,3 +31,10 @@
         };
+    }
+
+    var methods = ['setTimeout', 'setInterval', 'setImmediate'];
+    methods.forEach(function (name) {
+        wrap(global, name);
     });
+
+    wrap(process, 'nextTick');
 }
@@ -36,4 +48,4 @@
 module.exports = function (app) {
-    // Fix global async functions
-    fixGlobalMethods(app);
+    // Fix async functions
+    fixAsyncMethods(app);
 };
--- a/lib/env.js
+++ b/lib/env.js
@@ ... @@
 /**
- * Fix the global async functions
+ * Fix async functions
  * so that the request context can be restored in async callbacks
@@ ... @@
  */
-function fixGlobalMethods(app) {
-    var methods = ['setTimeout', 'setInterval', 'setImmediate'];
+function fixAsyncMethods(app) {
 
-    methods.forEach(function (name) {
-        var method = global[name];
-        global[name] = function (callback) {
+    /**
+     * Wrap an async function
+     *
+     * @param {Object} obj module
+     * @param {string} name method name
+     */
+    function wrap(obj, name) {
+        var method = obj[name];
+        obj[name] = function (callback) {
             var args = Array.prototype.slice.call(arguments, 1);
@@ ... @@
         };
+    }
+
+    var methods = ['setTimeout', 'setInterval', 'setImmediate'];
+    methods.forEach(function (name) {
+        wrap(global, name);
     });
+
+    wrap(process, 'nextTick');
 }
@@ ... @@
 module.exports = function (app) {
-    // Fix global async functions
-    fixGlobalMethods(app);
+    // Fix async functions
+    fixAsyncMethods(app);
 };
--- a/lib/env.js
+++ b/lib/env.js
@@ -6,3 +6,3 @@
CON /**
DEL  * Fix the global async functions
ADD  * Fix async functions
CON  * so that the request context can be restored in async callbacks
@@ -12,8 +12,13 @@
CON  */
DEL function fixGlobalMethods(app) {
DEL     var methods = ['setTimeout', 'setInterval', 'setImmediate'];
ADD function fixAsyncMethods(app) {
CON
DEL     methods.forEach(function (name) {
DEL         var method = global[name];
DEL         global[name] = function (callback) {
ADD     /**
ADD      * Wrap an async function
ADD      *
ADD      * @param {Object} obj module
ADD      * @param {string} name method name
ADD      */
ADD     function wrap(obj, name) {
ADD         var method = obj[name];
ADD         obj[name] = function (callback) {
CON             var args = Array.prototype.slice.call(arguments, 1);
@@ -26,3 +31,10 @@
CON         };
ADD     }
ADD
ADD     var methods = ['setTimeout', 'setInterval', 'setImmediate'];
ADD     methods.forEach(function (name) {
ADD         wrap(global, name);
CON     });
ADD
ADD     wrap(process, 'nextTick');
CON }
@@ -36,4 +48,4 @@
CON module.exports = function (app) {
DEL     // Fix global async functions
DEL     fixGlobalMethods(app);
ADD     // Fix async functions
ADD     fixAsyncMethods(app);
CON };
<<<<<<< SEARCH
/**
 * Fix the global async functions
 * so that the request context can be restored in async callbacks
 *
 * @inner
 * @param {Object} app server
 */
function fixGlobalMethods(app) {
    var methods = ['setTimeout', 'setInterval', 'setImmediate'];

    methods.forEach(function (name) {
        var method = global[name];
        global[name] = function (callback) {
            var args = Array.prototype.slice.call(arguments, 1);
            var id = app.stashContext();
=======
/**
 * Fix async functions
 * so that the request context can be restored in async callbacks
 *
 * @inner
 * @param {Object} app server
 */
function fixAsyncMethods(app) {

    /**
     * Wrap an async function
     *
     * @param {Object} obj module
     * @param {string} name method name
     */
    function wrap(obj, name) {
        var method = obj[name];
        obj[name] = function (callback) {
            var args = Array.prototype.slice.call(arguments, 1);
            var id = app.stashContext();
>>>>>>> REPLACE
<<<<<<< SEARCH
            method.apply(null, args);
        };
    });
}
=======
            method.apply(null, args);
        };
    }

    var methods = ['setTimeout', 'setInterval', 'setImmediate'];
    methods.forEach(function (name) {
        wrap(global, name);
    });

    wrap(process, 'nextTick');
}
>>>>>>> REPLACE
<<<<<<< SEARCH
 */
module.exports = function (app) {
    // Fix global async functions
    fixGlobalMethods(app);
};
=======
 */
module.exports = function (app) {
    // Fix async functions
    fixAsyncMethods(app);
};
>>>>>>> REPLACE
j00bar/django-widgy
7f7fd4e7547af3a6d7e3cd4da025c2b0ab24508b
widgy/contrib/widgy_mezzanine/migrations/0001_initial.py
python
apache-2.0
Remove dependency for ReviewedVersionTracker in migrations The base widgy migrations had references to ReviewedVersionTracker, which is not part of the base widgy install. This commit changes the dependency to VersionTracker instead, which is part of the base widgy install.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import widgy.db.fields import django.db.models.deletion import widgy.contrib.widgy_mezzanine.models class Migration(migrations.Migration): dependencies = [ ('pages', '__first__'), ('review_queue', '0001_initial'), ] operations = [ migrations.CreateModel( name='WidgyPage', fields=[ ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')), ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='review_queue.ReviewedVersionTracker', null=True)), ], options={ 'ordering': ('_order',), 'verbose_name': 'widgy page', 'verbose_name_plural': 'widgy pages', }, bases=(widgy.contrib.widgy_mezzanine.models.WidgyPageMixin, 'pages.page'), ), migrations.CreateModel( name='UndeletePage', fields=[ ], options={ 'ordering': ('_order',), 'verbose_name': 'restore deleted page', 'proxy': True, }, bases=('widgy_mezzanine.widgypage',), ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import widgy.db.fields import django.db.models.deletion import widgy.contrib.widgy_mezzanine.models class Migration(migrations.Migration): dependencies = [ ('pages', '__first__'), ('widgy', '0001_initial'), ] operations = [ migrations.CreateModel( name='WidgyPage', fields=[ ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')), ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='widgy.VersionTracker', null=True)), ], options={ 'ordering': ('_order',), 'verbose_name': 'widgy page', 'verbose_name_plural': 'widgy pages', }, bases=(widgy.contrib.widgy_mezzanine.models.WidgyPageMixin, 'pages.page'), ), migrations.CreateModel( name='UndeletePage', fields=[ ], options={ 'ordering': ('_order',), 'verbose_name': 'restore deleted page', 'proxy': True, }, bases=('widgy_mezzanine.widgypage',), ), ]
2
2
2
mixed
--- a/widgy/contrib/widgy_mezzanine/migrations/0001_initial.py +++ b/widgy/contrib/widgy_mezzanine/migrations/0001_initial.py @@ -13,3 +13,3 @@ ('pages', '__first__'), - ('review_queue', '0001_initial'), + ('widgy', '0001_initial'), ] @@ -21,3 +21,3 @@ ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')), - ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='review_queue.ReviewedVersionTracker', null=True)), + ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='widgy.VersionTracker', null=True)), ],
--- a/widgy/contrib/widgy_mezzanine/migrations/0001_initial.py +++ b/widgy/contrib/widgy_mezzanine/migrations/0001_initial.py @@ ... @@ ('pages', '__first__'), - ('review_queue', '0001_initial'), + ('widgy', '0001_initial'), ] @@ ... @@ ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')), - ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='review_queue.ReviewedVersionTracker', null=True)), + ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='widgy.VersionTracker', null=True)), ],
--- a/widgy/contrib/widgy_mezzanine/migrations/0001_initial.py +++ b/widgy/contrib/widgy_mezzanine/migrations/0001_initial.py @@ -13,3 +13,3 @@ CON ('pages', '__first__'), DEL ('review_queue', '0001_initial'), ADD ('widgy', '0001_initial'), CON ] @@ -21,3 +21,3 @@ CON ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')), DEL ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='review_queue.ReviewedVersionTracker', null=True)), ADD ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='widgy.VersionTracker', null=True)), CON ],
<<<<<<< SEARCH dependencies = [ ('pages', '__first__'), ('review_queue', '0001_initial'), ] ======= dependencies = [ ('pages', '__first__'), ('widgy', '0001_initial'), ] >>>>>>> REPLACE <<<<<<< SEARCH fields=[ ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')), ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='review_queue.ReviewedVersionTracker', null=True)), ], options={ ======= fields=[ ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')), ('root_node', widgy.db.fields.VersionedWidgyField(on_delete=django.db.models.deletion.SET_NULL, verbose_name='widgy content', to='widgy.VersionTracker', null=True)), ], options={ >>>>>>> REPLACE
bebeeteam/summernote
06a3d3880fec2c12e1e0a322ef4b98446e0f37e4
src/js/bs3/module/ImagePopover.js
javascript
mit
Remove image on backspace key event
define([ 'summernote/base/core/func', 'summernote/base/core/list', 'summernote/base/core/dom' ], function (func, list, dom) { var ImagePopover = function (context) { var ui = $.summernote.ui; var options = context.options; this.shouldInitialize = function () { return !list.isEmpty(options.popover.image); }; this.initialize = function () { this.$popover = ui.popover({ className: 'note-image-popover' }).render().appendTo('body'); var $content = this.$popover.find('.popover-content'); context.invoke('buttons.build', $content, options.popover.image); }; this.destroy = function () { this.$popover.remove(); }; this.update = function (target) { if (dom.isImg(target)) { var pos = dom.posFromPlaceholder(target); this.$popover.css({ display: 'block', left: pos.left, top: pos.top }); } else { this.hide(); } }; this.hide = function () { this.$popover.hide(); }; }; return ImagePopover; });
define([ 'summernote/base/core/func', 'summernote/base/core/list', 'summernote/base/core/dom', 'summernote/base/core/key' ], function (func, list, dom, key) { var ImagePopover = function (context) { var self = this; var ui = $.summernote.ui; var options = context.options; this.shouldInitialize = function () { return !list.isEmpty(options.popover.image); }; this.events = { 'summernote.keydown': function (we, e) { self.handleKeydown(e); } }; this.initialize = function () { this.$popover = ui.popover({ className: 'note-image-popover' }).render().appendTo('body'); var $content = this.$popover.find('.popover-content'); context.invoke('buttons.build', $content, options.popover.image); }; this.destroy = function () { this.$popover.remove(); }; this.isVisible = function () { return this.$popover.is(':visible'); }; this.update = function (target) { if (dom.isImg(target)) { var pos = dom.posFromPlaceholder(target); this.$popover.css({ display: 'block', left: pos.left, top: pos.top }); //be sure editor has focus to handle key events context.invoke('editor.focus'); } else { this.hide(); } }; this.hide = function () { this.$popover.hide(); }; this.handleKeydown = function (e) { if (list.contains([key.code.BACKSPACE], e.keyCode)) { if (this.isVisible()) { context.invoke('editor.removeMedia'); } } }; }; return ImagePopover; });
25
2
5
mixed
--- a/src/js/bs3/module/ImagePopover.js +++ b/src/js/bs3/module/ImagePopover.js @@ -3,5 +3,7 @@ 'summernote/base/core/list', - 'summernote/base/core/dom' -], function (func, list, dom) { + 'summernote/base/core/dom', + 'summernote/base/core/key' +], function (func, list, dom, key) { var ImagePopover = function (context) { + var self = this; var ui = $.summernote.ui; @@ -12,2 +14,8 @@ return !list.isEmpty(options.popover.image); + }; + + this.events = { + 'summernote.keydown': function (we, e) { + self.handleKeydown(e); + } }; @@ -27,2 +35,6 @@ + this.isVisible = function () { + return this.$popover.is(':visible'); + }; + this.update = function (target) { @@ -35,2 +47,5 @@ }); + + //be sure editor has focus to handle key events + context.invoke('editor.focus'); } else { @@ -43,2 +58,10 @@ }; + + this.handleKeydown = function (e) { + if (list.contains([key.code.BACKSPACE], e.keyCode)) { + if (this.isVisible()) { + context.invoke('editor.removeMedia'); + } + } + }; };
--- a/src/js/bs3/module/ImagePopover.js +++ b/src/js/bs3/module/ImagePopover.js @@ ... @@ 'summernote/base/core/list', - 'summernote/base/core/dom' -], function (func, list, dom) { + 'summernote/base/core/dom', + 'summernote/base/core/key' +], function (func, list, dom, key) { var ImagePopover = function (context) { + var self = this; var ui = $.summernote.ui; @@ ... @@ return !list.isEmpty(options.popover.image); + }; + + this.events = { + 'summernote.keydown': function (we, e) { + self.handleKeydown(e); + } }; @@ ... @@ + this.isVisible = function () { + return this.$popover.is(':visible'); + }; + this.update = function (target) { @@ ... @@ }); + + //be sure editor has focus to handle key events + context.invoke('editor.focus'); } else { @@ ... @@ }; + + this.handleKeydown = function (e) { + if (list.contains([key.code.BACKSPACE], e.keyCode)) { + if (this.isVisible()) { + context.invoke('editor.removeMedia'); + } + } + }; };
--- a/src/js/bs3/module/ImagePopover.js +++ b/src/js/bs3/module/ImagePopover.js @@ -3,5 +3,7 @@ CON 'summernote/base/core/list', DEL 'summernote/base/core/dom' DEL ], function (func, list, dom) { ADD 'summernote/base/core/dom', ADD 'summernote/base/core/key' ADD ], function (func, list, dom, key) { CON var ImagePopover = function (context) { ADD var self = this; CON var ui = $.summernote.ui; @@ -12,2 +14,8 @@ CON return !list.isEmpty(options.popover.image); ADD }; ADD ADD this.events = { ADD 'summernote.keydown': function (we, e) { ADD self.handleKeydown(e); ADD } CON }; @@ -27,2 +35,6 @@ CON ADD this.isVisible = function () { ADD return this.$popover.is(':visible'); ADD }; ADD CON this.update = function (target) { @@ -35,2 +47,5 @@ CON }); ADD ADD //be sure editor has focus to handle key events ADD context.invoke('editor.focus'); CON } else { @@ -43,2 +58,10 @@ CON }; ADD ADD this.handleKeydown = function (e) { ADD if (list.contains([key.code.BACKSPACE], e.keyCode)) { ADD if (this.isVisible()) { ADD context.invoke('editor.removeMedia'); ADD } ADD } ADD }; CON };
<<<<<<< SEARCH 'summernote/base/core/func', 'summernote/base/core/list', 'summernote/base/core/dom' ], function (func, list, dom) { var ImagePopover = function (context) { var ui = $.summernote.ui; ======= 'summernote/base/core/func', 'summernote/base/core/list', 'summernote/base/core/dom', 'summernote/base/core/key' ], function (func, list, dom, key) { var ImagePopover = function (context) { var self = this; var ui = $.summernote.ui; >>>>>>> REPLACE <<<<<<< SEARCH this.shouldInitialize = function () { return !list.isEmpty(options.popover.image); }; ======= this.shouldInitialize = function () { return !list.isEmpty(options.popover.image); }; this.events = { 'summernote.keydown': function (we, e) { self.handleKeydown(e); } }; >>>>>>> REPLACE <<<<<<< SEARCH }; this.update = function (target) { if (dom.isImg(target)) { ======= }; this.isVisible = function () { return this.$popover.is(':visible'); }; this.update = function (target) { if (dom.isImg(target)) { >>>>>>> REPLACE <<<<<<< SEARCH top: pos.top }); } else { this.hide(); ======= top: pos.top }); //be sure editor has focus to handle key events context.invoke('editor.focus'); } else { this.hide(); >>>>>>> REPLACE <<<<<<< SEARCH this.$popover.hide(); }; }; ======= this.$popover.hide(); }; this.handleKeydown = function (e) { if (list.contains([key.code.BACKSPACE], e.keyCode)) { if (this.isVisible()) { context.invoke('editor.removeMedia'); } } }; }; >>>>>>> REPLACE
mglukhikh/intellij-community
46da9197955776fcf9d7542417f8ceb54cbf6392
platform/platform-impl/src/com/intellij/ui/NotificationActions.kt
kotlin
apache-2.0
Rename one of NotificationAction classes to avoid confusion with another
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui import com.intellij.openapi.actionSystem.AnAction import com.intellij.openapi.actionSystem.AnActionEvent import com.intellij.openapi.wm.IdeFrame /** * @author Alexander Lobas */ abstract class NotificationAction : AnAction() { override fun update(e: AnActionEvent) { val layout = getBalloonLayout(e) e.presentation.isEnabled = layout != null && layout.balloonCount > 0 } protected fun getBalloonLayout(e: AnActionEvent) = e.getData(IdeFrame.KEY)?.balloonLayout as BalloonLayoutImpl? } class CloseFirstNotificationAction : NotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeFirst() } } class CloseAllNotificationsAction : NotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeAll() } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui import com.intellij.openapi.actionSystem.AnAction import com.intellij.openapi.actionSystem.AnActionEvent import com.intellij.openapi.wm.IdeFrame abstract class CloseNotificationAction : AnAction() { override fun update(e: AnActionEvent) { val layout = getBalloonLayout(e) e.presentation.isEnabled = layout != null && layout.balloonCount > 0 } protected fun getBalloonLayout(e: AnActionEvent) = e.getData(IdeFrame.KEY)?.balloonLayout as BalloonLayoutImpl? } class CloseFirstNotificationAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeFirst() } } class CloseAllNotificationsAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeAll() } }
3
7
3
mixed
--- a/platform/platform-impl/src/com/intellij/ui/NotificationActions.kt +++ b/platform/platform-impl/src/com/intellij/ui/NotificationActions.kt @@ -21,7 +21,3 @@ -/** - * @author Alexander Lobas - */ - -abstract class NotificationAction : AnAction() { +abstract class CloseNotificationAction : AnAction() { override fun update(e: AnActionEvent) { @@ -34,3 +30,3 @@ -class CloseFirstNotificationAction : NotificationAction() { +class CloseFirstNotificationAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) { @@ -40,3 +36,3 @@ -class CloseAllNotificationsAction : NotificationAction() { +class CloseAllNotificationsAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) {
--- a/platform/platform-impl/src/com/intellij/ui/NotificationActions.kt +++ b/platform/platform-impl/src/com/intellij/ui/NotificationActions.kt @@ ... @@ -/** - * @author Alexander Lobas - */ - -abstract class NotificationAction : AnAction() { +abstract class CloseNotificationAction : AnAction() { override fun update(e: AnActionEvent) { @@ ... @@ -class CloseFirstNotificationAction : NotificationAction() { +class CloseFirstNotificationAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) { @@ ... @@ -class CloseAllNotificationsAction : NotificationAction() { +class CloseAllNotificationsAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) {
--- a/platform/platform-impl/src/com/intellij/ui/NotificationActions.kt +++ b/platform/platform-impl/src/com/intellij/ui/NotificationActions.kt @@ -21,7 +21,3 @@ CON DEL /** DEL * @author Alexander Lobas DEL */ DEL DEL abstract class NotificationAction : AnAction() { ADD abstract class CloseNotificationAction : AnAction() { CON override fun update(e: AnActionEvent) { @@ -34,3 +30,3 @@ CON DEL class CloseFirstNotificationAction : NotificationAction() { ADD class CloseFirstNotificationAction : CloseNotificationAction() { CON override fun actionPerformed(e: AnActionEvent) { @@ -40,3 +36,3 @@ CON DEL class CloseAllNotificationsAction : NotificationAction() { ADD class CloseAllNotificationsAction : CloseNotificationAction() { CON override fun actionPerformed(e: AnActionEvent) {
<<<<<<< SEARCH import com.intellij.openapi.wm.IdeFrame /** * @author Alexander Lobas */ abstract class NotificationAction : AnAction() { override fun update(e: AnActionEvent) { val layout = getBalloonLayout(e) ======= import com.intellij.openapi.wm.IdeFrame abstract class CloseNotificationAction : AnAction() { override fun update(e: AnActionEvent) { val layout = getBalloonLayout(e) >>>>>>> REPLACE <<<<<<< SEARCH } class CloseFirstNotificationAction : NotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeFirst() } } class CloseAllNotificationsAction : NotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeAll() ======= } class CloseFirstNotificationAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeFirst() } } class CloseAllNotificationsAction : CloseNotificationAction() { override fun actionPerformed(e: AnActionEvent) { getBalloonLayout(e)?.closeAll() >>>>>>> REPLACE
andschwa/rust-genetic-algorithm
1e9dc2c02a8aa21195d1e487ec389fc7a7f5a5f9
src/main.rs
rust
agpl-3.0
Implement tournament selection to obtain some convergence
extern crate rand; use rand::thread_rng; use rand::distributions::Range; use individual::Individual; mod individual; fn main() { let mut rng = thread_rng(); let range = Range::new(-512.03_f64, 511.97); // range for Schwefel problem // initialize population let mut population: Vec<_> = (0..128).map(|_| { Individual::new(&range, &mut rng) }).collect(); for i in 0..10000 { // generate mutated offspring population = population.iter().map(|x| { x.mutate(&range, &mut rng) }).collect(); let best = population.iter().min().unwrap(); if i % 100 == 0 { println!("{}th fitness: {}", i, best.fitness); } if best.fitness < 1000_f64 { println!("Solution: {:?}", best.solution); return; } } println!("Failed to converge."); }
extern crate rand; use rand::{Rng, thread_rng}; use rand::distributions::Range; use individual::Individual; mod individual; fn select<'a, R: Rng>(population: &'a Vec<Individual>, rng: &mut R) -> &'a Individual { let population: Vec<_> = (0..4).map(|_| rng.choose(population)).collect(); if let Some(selected) = population.iter().min() { return selected.unwrap(); } unimplemented!(); } fn main() { let mut rng = thread_rng(); let range = Range::new(-512.03_f64, 511.97); // range for Schwefel problem // initialize population let mut population: Vec<_> = (0..128).map(|_| { Individual::new(&range, &mut rng) }).collect(); for i in 0..10000 { // select and mutate individuals for next population population = (0..128).map(|_| { select(&population, &mut rng).mutate(&range, &mut rng) }).collect(); let best = population.iter().min().unwrap(); if i % 100 == 0 { println!("{}th fitness: {}", i, best.fitness); } if best.fitness < 1000_f64 { println!("{}th solution converged at {}: {:?}", i, best.fitness, best.solution); return; } } println!("Failed to converge."); }
15
5
4
mixed
--- a/src/main.rs +++ b/src/main.rs @@ -2,3 +2,3 @@ -use rand::thread_rng; +use rand::{Rng, thread_rng}; use rand::distributions::Range; @@ -7,2 +7,11 @@ mod individual; + +fn select<'a, R: Rng>(population: &'a Vec<Individual>, rng: &mut R) + -> &'a Individual { + let population: Vec<_> = (0..4).map(|_| rng.choose(population)).collect(); + if let Some(selected) = population.iter().min() { + return selected.unwrap(); + } + unimplemented!(); +} @@ -18,5 +27,5 @@ for i in 0..10000 { - // generate mutated offspring - population = population.iter().map(|x| { - x.mutate(&range, &mut rng) + // select and mutate individuals for next population + population = (0..128).map(|_| { + select(&population, &mut rng).mutate(&range, &mut rng) }).collect(); @@ -29,3 +38,4 @@ if best.fitness < 1000_f64 { - println!("Solution: {:?}", best.solution); + println!("{}th solution converged at {}: {:?}", + i, best.fitness, best.solution); return;
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ -use rand::thread_rng; +use rand::{Rng, thread_rng}; use rand::distributions::Range; @@ ... @@ mod individual; + +fn select<'a, R: Rng>(population: &'a Vec<Individual>, rng: &mut R) + -> &'a Individual { + let population: Vec<_> = (0..4).map(|_| rng.choose(population)).collect(); + if let Some(selected) = population.iter().min() { + return selected.unwrap(); + } + unimplemented!(); +} @@ ... @@ for i in 0..10000 { - // generate mutated offspring - population = population.iter().map(|x| { - x.mutate(&range, &mut rng) + // select and mutate individuals for next population + population = (0..128).map(|_| { + select(&population, &mut rng).mutate(&range, &mut rng) }).collect(); @@ ... @@ if best.fitness < 1000_f64 { - println!("Solution: {:?}", best.solution); + println!("{}th solution converged at {}: {:?}", + i, best.fitness, best.solution); return;
--- a/src/main.rs +++ b/src/main.rs @@ -2,3 +2,3 @@ CON DEL use rand::thread_rng; ADD use rand::{Rng, thread_rng}; CON use rand::distributions::Range; @@ -7,2 +7,11 @@ CON mod individual; ADD ADD fn select<'a, R: Rng>(population: &'a Vec<Individual>, rng: &mut R) ADD -> &'a Individual { ADD let population: Vec<_> = (0..4).map(|_| rng.choose(population)).collect(); ADD if let Some(selected) = population.iter().min() { ADD return selected.unwrap(); ADD } ADD unimplemented!(); ADD } CON @@ -18,5 +27,5 @@ CON for i in 0..10000 { DEL // generate mutated offspring DEL population = population.iter().map(|x| { DEL x.mutate(&range, &mut rng) ADD // select and mutate individuals for next population ADD population = (0..128).map(|_| { ADD select(&population, &mut rng).mutate(&range, &mut rng) CON }).collect(); @@ -29,3 +38,4 @@ CON if best.fitness < 1000_f64 { DEL println!("Solution: {:?}", best.solution); ADD println!("{}th solution converged at {}: {:?}", ADD i, best.fitness, best.solution); CON return;
<<<<<<< SEARCH extern crate rand; use rand::thread_rng; use rand::distributions::Range; use individual::Individual; mod individual; fn main() { ======= extern crate rand; use rand::{Rng, thread_rng}; use rand::distributions::Range; use individual::Individual; mod individual; fn select<'a, R: Rng>(population: &'a Vec<Individual>, rng: &mut R) -> &'a Individual { let population: Vec<_> = (0..4).map(|_| rng.choose(population)).collect(); if let Some(selected) = population.iter().min() { return selected.unwrap(); } unimplemented!(); } fn main() { >>>>>>> REPLACE <<<<<<< SEARCH for i in 0..10000 { // generate mutated offspring population = population.iter().map(|x| { x.mutate(&range, &mut rng) }).collect(); ======= for i in 0..10000 { // select and mutate individuals for next population population = (0..128).map(|_| { select(&population, &mut rng).mutate(&range, &mut rng) }).collect(); >>>>>>> REPLACE <<<<<<< SEARCH if best.fitness < 1000_f64 { println!("Solution: {:?}", best.solution); return; } ======= if best.fitness < 1000_f64 { println!("{}th solution converged at {}: {:?}", i, best.fitness, best.solution); return; } >>>>>>> REPLACE
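The tournament selection added in the rust-genetic-algorithm commit above is language-agnostic: draw a handful of random contestants and keep the fittest. A minimal Python sketch of the same k-way tournament, assuming a minimisation problem and a population of objects carrying a fitness value (Individual, tournament_select and the population size are illustrative names, not taken from the repository):

import random
from collections import namedtuple

Individual = namedtuple("Individual", ["fitness", "solution"])

def tournament_select(population, k=4, rng=random):
    # Sample k contestants uniformly (with replacement) and keep the one
    # with the lowest fitness, treating this as a minimisation problem.
    contestants = [rng.choice(population) for _ in range(k)]
    return min(contestants, key=lambda ind: ind.fitness)

population = [Individual(fitness=random.uniform(0.0, 1000.0), solution=None)
              for _ in range(128)]
next_generation = [tournament_select(population) for _ in range(128)]
print(min(next_generation, key=lambda ind: ind.fitness).fitness)

Because the fitter contestant of each small tournament survives, repeated rounds bias the population toward better solutions while still letting weaker individuals through occasionally, which is what gives the loop in the commit its convergence.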
nimbis/django-selenium-testcase
4076fb322814848d802d1f925d163e90b3d629a9
selenium_testcase/testcases/forms.py
python
bsd-3-clause
Split get_input from set_input in FormTestMixin. In order to reduce side-effects, this commit moves the @wait_for to a get_input method and set_input operates immediately.
# -*- coding: utf-8 -*- from __future__ import absolute_import from selenium.webdriver.common.by import By from .utils import wait_for class FormTestMixin: # default search element form_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), (By.XPATH, '//form[@action="{}"]',), (By.XPATH, '//form[@name="{}"]',), (By.XPATH, '//form/*',), ) @wait_for def get_form(self, *args, **kwargs): """ Return form element or None. """ return self.find_element( self.form_search_list, *args, **kwargs) input_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), ) @wait_for def set_input(self, field, value, **kwargs): input = self.find_element( self.input_search_list, field, **kwargs) input.clear() input.send_keys(value) return input
# -*- coding: utf-8 -*- from __future__ import absolute_import from selenium.webdriver.common.by import By from .utils import wait_for class FormTestMixin: # default search element form_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), (By.XPATH, '//form[@action="{}"]',), (By.XPATH, '//form[@name="{}"]',), (By.XPATH, '//form',), ) @wait_for def get_form(self, *args, **kwargs): """ Return form element or None. """ return self.find_element( self.form_search_list, *args, **kwargs) input_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), ) @wait_for def get_input(self, field, **kwargs): """ Return matching input field. """ return self.find_element( self.input_search_list, field, **kwargs) def set_input(self, field, value, **kwargs): """ Clear the field and enter value. """ element = self.get_input(field, **kwargs) element.clear() element.send_keys(value) return element
11
6
2
mixed
--- a/selenium_testcase/testcases/forms.py +++ b/selenium_testcase/testcases/forms.py @@ -17,3 +17,3 @@ (By.XPATH, '//form[@name="{}"]',), - (By.XPATH, '//form/*',), + (By.XPATH, '//form',), ) @@ -32,7 +32,12 @@ @wait_for + def get_input(self, field, **kwargs): + """ Return matching input field. """ + return self.find_element( + self.input_search_list, field, **kwargs) + def set_input(self, field, value, **kwargs): - input = self.find_element( - self.input_search_list, field, **kwargs) - input.clear() - input.send_keys(value) - return input + """ Clear the field and enter value. """ + element = self.get_input(field, **kwargs) + element.clear() + element.send_keys(value) + return element
--- a/selenium_testcase/testcases/forms.py +++ b/selenium_testcase/testcases/forms.py @@ ... @@ (By.XPATH, '//form[@name="{}"]',), - (By.XPATH, '//form/*',), + (By.XPATH, '//form',), ) @@ ... @@ @wait_for + def get_input(self, field, **kwargs): + """ Return matching input field. """ + return self.find_element( + self.input_search_list, field, **kwargs) + def set_input(self, field, value, **kwargs): - input = self.find_element( - self.input_search_list, field, **kwargs) - input.clear() - input.send_keys(value) - return input + """ Clear the field and enter value. """ + element = self.get_input(field, **kwargs) + element.clear() + element.send_keys(value) + return element
--- a/selenium_testcase/testcases/forms.py +++ b/selenium_testcase/testcases/forms.py @@ -17,3 +17,3 @@ CON (By.XPATH, '//form[@name="{}"]',), DEL (By.XPATH, '//form/*',), ADD (By.XPATH, '//form',), CON ) @@ -32,7 +32,12 @@ CON @wait_for ADD def get_input(self, field, **kwargs): ADD """ Return matching input field. """ ADD return self.find_element( ADD self.input_search_list, field, **kwargs) ADD CON def set_input(self, field, value, **kwargs): DEL input = self.find_element( DEL self.input_search_list, field, **kwargs) DEL input.clear() DEL input.send_keys(value) DEL return input ADD """ Clear the field and enter value. """ ADD element = self.get_input(field, **kwargs) ADD element.clear() ADD element.send_keys(value) ADD return element
<<<<<<< SEARCH (By.XPATH, '//form[@action="{}"]',), (By.XPATH, '//form[@name="{}"]',), (By.XPATH, '//form/*',), ) ======= (By.XPATH, '//form[@action="{}"]',), (By.XPATH, '//form[@name="{}"]',), (By.XPATH, '//form',), ) >>>>>>> REPLACE <<<<<<< SEARCH @wait_for def set_input(self, field, value, **kwargs): input = self.find_element( self.input_search_list, field, **kwargs) input.clear() input.send_keys(value) return input ======= @wait_for def get_input(self, field, **kwargs): """ Return matching input field. """ return self.find_element( self.input_search_list, field, **kwargs) def set_input(self, field, value, **kwargs): """ Clear the field and enter value. """ element = self.get_input(field, **kwargs) element.clear() element.send_keys(value) return element >>>>>>> REPLACE
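The split above keeps the retry/wait behaviour on the element lookup while letting the mutation run exactly once on whatever the lookup returned. A hedged Python sketch of that shape; wait_for below is a stand-in retry decorator and FormHelper an invented class, not the project's actual implementation:

import functools
import time

def wait_for(fn, timeout=5.0, interval=0.2):
    """Retry fn until it returns a truthy value or the timeout expires."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        deadline = time.monotonic() + timeout
        while True:
            result = fn(*args, **kwargs)
            if result or time.monotonic() >= deadline:
                return result
            time.sleep(interval)
    return wrapper

class FormHelper:
    def __init__(self, finder):
        self._finder = finder  # callable(field) -> element or None

    @wait_for
    def get_input(self, field):
        # Only the element lookup is retried while the page settles.
        return self._finder(field)

    def set_input(self, field, value):
        # The side effect runs once, on whatever element the lookup returned.
        element = self.get_input(field)
        element.clear()
        element.send_keys(value)
        return element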
damianavila/nikola
6926ddbb9cdbf05808339412cee5106e581f66cb
tests/import_wordpress_and_build_workflow.py
python
mit
Use the more or less new options for importing
# -*- coding: utf-8 -*- """ Script to test the import workflow. It will remove an existing Nikola installation and then install from the package directory. After that it will do create a new site with the import_wordpress command and use that newly created site to make a build. """ from __future__ import unicode_literals, print_function import os import shutil TEST_SITE_DIRECTORY = 'import_test_site' def main(import_directory=None): if import_directory is None: import_directory = TEST_SITE_DIRECTORY if os.path.exists(import_directory): print('deleting %s' % import_directory) shutil.rmtree(import_directory) test_directory = os.path.dirname(__file__) package_directory = os.path.abspath(os.path.join(test_directory, '..')) os.system('echo "y" | pip uninstall Nikola') os.system('pip install %s' % package_directory) os.system('nikola') import_file = os.path.join(test_directory, 'wordpress_export_example.xml') os.system( 'nikola import_wordpress -f %s -o %s' % (import_file, import_directory)) assert os.path.exists( import_directory), "The directory %s should be existing." os.chdir(import_directory) os.system('nikola build') if __name__ == '__main__': main()
# -*- coding: utf-8 -*- """ Script to test the import workflow. It will remove an existing Nikola installation and then install from the package directory. After that it will do create a new site with the import_wordpress command and use that newly created site to make a build. """ from __future__ import unicode_literals, print_function import os import shutil TEST_SITE_DIRECTORY = 'import_test_site' def main(import_directory=None): if import_directory is None: import_directory = TEST_SITE_DIRECTORY if os.path.exists(import_directory): print('deleting %s' % import_directory) shutil.rmtree(import_directory) test_directory = os.path.dirname(__file__) package_directory = os.path.abspath(os.path.join(test_directory, '..')) os.system('echo "y" | pip uninstall Nikola') os.system('pip install %s' % package_directory) os.system('nikola') import_file = os.path.join(test_directory, 'wordpress_export_example.xml') os.system( 'nikola import_wordpress -o {folder} {file}'.format(file=import_file, folder=import_directory)) assert os.path.exists( import_directory), "The directory %s should be existing." os.chdir(import_directory) os.system('nikola build') if __name__ == '__main__': main()
2
1
1
mixed
--- a/tests/import_wordpress_and_build_workflow.py +++ b/tests/import_wordpress_and_build_workflow.py @@ -33,3 +33,4 @@ os.system( - 'nikola import_wordpress -f %s -o %s' % (import_file, import_directory)) + 'nikola import_wordpress -o {folder} {file}'.format(file=import_file, + folder=import_directory))
--- a/tests/import_wordpress_and_build_workflow.py +++ b/tests/import_wordpress_and_build_workflow.py @@ ... @@ os.system( - 'nikola import_wordpress -f %s -o %s' % (import_file, import_directory)) + 'nikola import_wordpress -o {folder} {file}'.format(file=import_file, + folder=import_directory))
--- a/tests/import_wordpress_and_build_workflow.py +++ b/tests/import_wordpress_and_build_workflow.py @@ -33,3 +33,4 @@ CON os.system( DEL 'nikola import_wordpress -f %s -o %s' % (import_file, import_directory)) ADD 'nikola import_wordpress -o {folder} {file}'.format(file=import_file, ADD folder=import_directory)) CON
<<<<<<< SEARCH import_file = os.path.join(test_directory, 'wordpress_export_example.xml') os.system( 'nikola import_wordpress -f %s -o %s' % (import_file, import_directory)) assert os.path.exists( ======= import_file = os.path.join(test_directory, 'wordpress_export_example.xml') os.system( 'nikola import_wordpress -o {folder} {file}'.format(file=import_file, folder=import_directory)) assert os.path.exists( >>>>>>> REPLACE
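The change above swaps %-interpolation for str.format with named placeholders, which stays readable when the arguments are supplied in a different order than they appear in the template. A small runnable comparison; the command template comes from the record above, while the file and folder values are placeholders:

import_file = "wordpress_export_example.xml"
import_directory = "import_test_site"

# Positional %-interpolation: argument order must match placeholder order.
cmd_old = 'nikola import_wordpress -f %s -o %s' % (import_file, import_directory)

# Named str.format fields: keyword arguments can be passed in any order.
cmd_new = 'nikola import_wordpress -o {folder} {file}'.format(
    file=import_file, folder=import_directory)

print(cmd_old)
print(cmd_new)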
micbou/JediHTTP
78d61ad0897b0a3f3f46c6df285f1a0907a0a910
jedihttp/handlers.py
python
apache-2.0
Add more info for completions
import bottle from bottle import response, request import json import jedi import logging app = bottle.Bottle( __name__ ) logger = logging.getLogger( __name__ ) @app.get( '/healthy' ) def healthy(): return _Json({}) @app.get( '/ready' ) def ready(): return _Json({}) @app.post( '/completions' ) def completion(): logger.info( 'received /completions request' ) script = _GetJediScript( request.json ) return _Json( { 'completions': [ { 'name': completion.name, 'description': completion.description, 'docstring': completion.docstring() } for completion in script.completions() ] } ) def _GetJediScript( request_data ): source = request_data[ 'source' ] line = request_data[ 'line' ] col = request_data[ 'col' ] path = request_data[ 'path' ] return jedi.Script( source, line, col, path ) def _Json( data ): response.content_type = 'application/json' return json.dumps( data )
import bottle from bottle import response, request import json import jedi import logging app = bottle.Bottle( __name__ ) logger = logging.getLogger( __name__ ) @app.get( '/healthy' ) def healthy(): return _Json({}) @app.get( '/ready' ) def ready(): return _Json({}) @app.post( '/completions' ) def completion(): logger.info( 'received /completions request' ) script = _GetJediScript( request.json ) return _Json( { 'completions': [ { 'name': completion.name, 'description': completion.description, 'docstring': completion.docstring(), 'module_path': completion.module_path, 'line': completion.line, 'column': completion.column } for completion in script.completions() ] } ) def _GetJediScript( request_data ): source = request_data[ 'source' ] line = request_data[ 'line' ] col = request_data[ 'col' ] path = request_data[ 'path' ] return jedi.Script( source, line, col, path ) def _Json( data ): response.content_type = 'application/json' return json.dumps( data )
4
1
1
mixed
--- a/jedihttp/handlers.py +++ b/jedihttp/handlers.py @@ -30,3 +30,6 @@ 'description': completion.description, - 'docstring': completion.docstring() + 'docstring': completion.docstring(), + 'module_path': completion.module_path, + 'line': completion.line, + 'column': completion.column } for completion in script.completions() ]
--- a/jedihttp/handlers.py +++ b/jedihttp/handlers.py @@ ... @@ 'description': completion.description, - 'docstring': completion.docstring() + 'docstring': completion.docstring(), + 'module_path': completion.module_path, + 'line': completion.line, + 'column': completion.column } for completion in script.completions() ]
--- a/jedihttp/handlers.py +++ b/jedihttp/handlers.py @@ -30,3 +30,6 @@ CON 'description': completion.description, DEL 'docstring': completion.docstring() ADD 'docstring': completion.docstring(), ADD 'module_path': completion.module_path, ADD 'line': completion.line, ADD 'column': completion.column CON } for completion in script.completions() ]
<<<<<<< SEARCH 'name': completion.name, 'description': completion.description, 'docstring': completion.docstring() } for completion in script.completions() ] } ) ======= 'name': completion.name, 'description': completion.description, 'docstring': completion.docstring(), 'module_path': completion.module_path, 'line': completion.line, 'column': completion.column } for completion in script.completions() ] } ) >>>>>>> REPLACE
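A caller of the /completions endpoint would now receive module_path, line and column alongside each completion. A hedged sketch of such a client using the requests library; the host, port and request payload are assumptions for illustration, and only the endpoint path and the response field names come from the record above:

import requests

payload = {
    "source": "import os\nos.pa",
    "line": 2,
    "col": 5,
    "path": "example.py",
}

# Assumes a JediHTTP server listening locally; the port is made up for the example.
response = requests.post("http://127.0.0.1:8080/completions", json=payload)
for completion in response.json()["completions"]:
    print(completion["name"], completion["module_path"],
          completion["line"], completion["column"])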
RitwikGupta/picoCTF-shell-manager
a05a05f24c29dcf039e02b55c18c476dc69757df
shell_manager/problem_repo.py
python
mit
Update repo entrypoint and remote_update stub.
""" Problem repository management for the shell manager. """ import spur, gzip from shutil import copy2 from os.path import join def local_update(repo_path, deb_paths=[]): """ Updates a local deb repository by copying debs and running scanpackages. Args: repo_path: the path to the local repository. dep_paths: list of problem deb paths to copy. """ [copy2(deb_path, repo_path) for deb_path in deb_paths] shell = spur.LocalShell() result = shell.run(["dpkg-scanpackages", ".", "/dev/null"], cwd=repo_path) packages_path = join(repo_path, "Packages.gz") with gzip.open(packages_path, "wb") as packages: packages.write(result.output) print("Updated problem repository.")
""" Problem repository management for the shell manager. """ import spur, gzip from shutil import copy2 from os.path import join def update_repo(args): """ Main entrypoint for repo update operations. """ if args.repo_type == "local": local_update(args.repository, args.package_paths) else: remote_update(args.repository, args.package_paths) def remote_update(repo_ui, deb_paths=[]): """ Pushes packages to a remote deb repository. Args: repo_uri: location of the repository. deb_paths: list of problem deb paths to copy. """ pass def local_update(repo_path, deb_paths=[]): """ Updates a local deb repository by copying debs and running scanpackages. Args: repo_path: the path to the local repository. dep_paths: list of problem deb paths to copy. """ [copy2(deb_path, repo_path) for deb_path in deb_paths] shell = spur.LocalShell() result = shell.run(["dpkg-scanpackages", ".", "/dev/null"], cwd=repo_path) packages_path = join(repo_path, "Packages.gz") with gzip.open(packages_path, "wb") as packages: packages.write(result.output) print("Updated problem repository.")
21
0
1
add_only
--- a/shell_manager/problem_repo.py +++ b/shell_manager/problem_repo.py @@ -8,2 +8,23 @@ from os.path import join + +def update_repo(args): + """ + Main entrypoint for repo update operations. + """ + + if args.repo_type == "local": + local_update(args.repository, args.package_paths) + else: + remote_update(args.repository, args.package_paths) + +def remote_update(repo_ui, deb_paths=[]): + """ + Pushes packages to a remote deb repository. + + Args: + repo_uri: location of the repository. + deb_paths: list of problem deb paths to copy. + """ + + pass
--- a/shell_manager/problem_repo.py +++ b/shell_manager/problem_repo.py @@ ... @@ from os.path import join + +def update_repo(args): + """ + Main entrypoint for repo update operations. + """ + + if args.repo_type == "local": + local_update(args.repository, args.package_paths) + else: + remote_update(args.repository, args.package_paths) + +def remote_update(repo_ui, deb_paths=[]): + """ + Pushes packages to a remote deb repository. + + Args: + repo_uri: location of the repository. + deb_paths: list of problem deb paths to copy. + """ + + pass
--- a/shell_manager/problem_repo.py +++ b/shell_manager/problem_repo.py @@ -8,2 +8,23 @@ CON from os.path import join ADD ADD def update_repo(args): ADD """ ADD Main entrypoint for repo update operations. ADD """ ADD ADD if args.repo_type == "local": ADD local_update(args.repository, args.package_paths) ADD else: ADD remote_update(args.repository, args.package_paths) ADD ADD def remote_update(repo_ui, deb_paths=[]): ADD """ ADD Pushes packages to a remote deb repository. ADD ADD Args: ADD repo_uri: location of the repository. ADD deb_paths: list of problem deb paths to copy. ADD """ ADD ADD pass CON
<<<<<<< SEARCH from shutil import copy2 from os.path import join def local_update(repo_path, deb_paths=[]): ======= from shutil import copy2 from os.path import join def update_repo(args): """ Main entrypoint for repo update operations. """ if args.repo_type == "local": local_update(args.repository, args.package_paths) else: remote_update(args.repository, args.package_paths) def remote_update(repo_ui, deb_paths=[]): """ Pushes packages to a remote deb repository. Args: repo_uri: location of the repository. deb_paths: list of problem deb paths to copy. """ pass def local_update(repo_path, deb_paths=[]): >>>>>>> REPLACE
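update_repo above expects an args object carrying repo_type, repository and package_paths. One plausible way to produce such an object is argparse; the flag names and choices below are assumptions about the wiring, not code from the project:

import argparse

def build_parser():
    parser = argparse.ArgumentParser(description="Update a problem repository.")
    parser.add_argument("repository", help="path or URI of the repository")
    parser.add_argument("package_paths", nargs="*", help="problem debs to publish")
    parser.add_argument("--repo-type", dest="repo_type",
                        choices=["local", "remote"], default="local")
    return parser

if __name__ == "__main__":
    args = build_parser().parse_args()
    # update_repo(args) would then dispatch to local_update or remote_update.
    print(args.repo_type, args.repository, args.package_paths)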
asomers/mockall
c15bd76f70be575cb4922be6d7b10348d9123ec3
mockall_examples/src/lib.rs
rust
apache-2.0
Add a use of mock! in mockall_examples
// vim: tw=80 //#![deny(missing_docs)] //! Examples of mock objects and their generated methods. //! //! This crate only exists to document the autogenerated methods of the //! [`Mockall`](https://docs.rs/mockall/latest/mockall) //! crate. You should never depend on this crate. // #[cfg(doc)] use mockall::*; /// A basic trait with several kinds of method. /// /// It is mocked by the [`MockFoo`](struct.MockFoo.html) struct. #[cfg(doc)] #[automock] pub trait Foo { /// A method with a `'static` return type fn foo(&self, x: i32, y: i16) -> i32; /// A method returning a reference fn bar(&self, x: i32) -> &i32; /// A method returning a mutable reference fn baz(&mut self, x: i32) -> &mut i32; /// A method returning a `'static` reference fn bean(&self) -> &'static i32; /// A static method fn bang(x: i32) -> i32; } #[cfg(doc)] #[automock(mod mock_ffi;)] extern "C" { /// A foreign "C" function pub fn ffi_func(); }
// vim: tw=80 #![deny(missing_docs)] //! Examples of mock objects and their generated methods. //! //! This crate only exists to document the autogenerated methods of the //! [`Mockall`](https://docs.rs/mockall/latest/mockall) //! crate. You should never depend on this crate. // #[cfg(doc)] use mockall::*; /// A basic trait with several kinds of method. /// /// It is mocked by the [`MockFoo`](struct.MockFoo.html) struct. #[cfg(doc)] #[automock] pub trait Foo { /// A method with a `'static` return type fn foo(&self, x: i32, y: i16) -> i32; /// A method returning a reference fn bar(&self, x: i32) -> &i32; /// A method returning a mutable reference fn baz(&mut self, x: i32) -> &mut i32; /// A method returning a `'static` reference fn bean(&self) -> &'static i32; /// A static method fn bang(x: i32) -> i32; } /// A trait implemented by a Struct we want to mock pub trait Bah { /// Some trait method fn bah(&self); } #[cfg(doc)] mock! { /// structs can be mocked with `mock!` /// /// Their mock methods have an identical API to the methods generated by /// `#[automock]` pub Boo { /// A method on a struct fn boo(&self); } /// An implementation of a trait on a mocked struct trait Bah { fn bah(&self); } } #[cfg(doc)] #[automock(mod mock_ffi;)] extern "C" { /// A foreign "C" function pub fn ffi_func(); }
23
1
2
mixed
--- a/mockall_examples/src/lib.rs +++ b/mockall_examples/src/lib.rs @@ -1,3 +1,3 @@ // vim: tw=80 -//#![deny(missing_docs)] +#![deny(missing_docs)] @@ -35,2 +35,24 @@ +/// A trait implemented by a Struct we want to mock +pub trait Bah { + /// Some trait method + fn bah(&self); +} + +#[cfg(doc)] +mock! { + /// structs can be mocked with `mock!` + /// + /// Their mock methods have an identical API to the methods generated by + /// `#[automock]` + pub Boo { + /// A method on a struct + fn boo(&self); + } + /// An implementation of a trait on a mocked struct + trait Bah { + fn bah(&self); + } +} + #[cfg(doc)]
--- a/mockall_examples/src/lib.rs +++ b/mockall_examples/src/lib.rs @@ ... @@ // vim: tw=80 -//#![deny(missing_docs)] +#![deny(missing_docs)] @@ ... @@ +/// A trait implemented by a Struct we want to mock +pub trait Bah { + /// Some trait method + fn bah(&self); +} + +#[cfg(doc)] +mock! { + /// structs can be mocked with `mock!` + /// + /// Their mock methods have an identical API to the methods generated by + /// `#[automock]` + pub Boo { + /// A method on a struct + fn boo(&self); + } + /// An implementation of a trait on a mocked struct + trait Bah { + fn bah(&self); + } +} + #[cfg(doc)]
--- a/mockall_examples/src/lib.rs +++ b/mockall_examples/src/lib.rs @@ -1,3 +1,3 @@ CON // vim: tw=80 DEL //#![deny(missing_docs)] ADD #![deny(missing_docs)] CON @@ -35,2 +35,24 @@ CON ADD /// A trait implemented by a Struct we want to mock ADD pub trait Bah { ADD /// Some trait method ADD fn bah(&self); ADD } ADD ADD #[cfg(doc)] ADD mock! { ADD /// structs can be mocked with `mock!` ADD /// ADD /// Their mock methods have an identical API to the methods generated by ADD /// `#[automock]` ADD pub Boo { ADD /// A method on a struct ADD fn boo(&self); ADD } ADD /// An implementation of a trait on a mocked struct ADD trait Bah { ADD fn bah(&self); ADD } ADD } ADD CON #[cfg(doc)]
<<<<<<< SEARCH // vim: tw=80 //#![deny(missing_docs)] //! Examples of mock objects and their generated methods. ======= // vim: tw=80 #![deny(missing_docs)] //! Examples of mock objects and their generated methods. >>>>>>> REPLACE <<<<<<< SEARCH } #[cfg(doc)] #[automock(mod mock_ffi;)] ======= } /// A trait implemented by a Struct we want to mock pub trait Bah { /// Some trait method fn bah(&self); } #[cfg(doc)] mock! { /// structs can be mocked with `mock!` /// /// Their mock methods have an identical API to the methods generated by /// `#[automock]` pub Boo { /// A method on a struct fn boo(&self); } /// An implementation of a trait on a mocked struct trait Bah { fn bah(&self); } } #[cfg(doc)] #[automock(mod mock_ffi;)] >>>>>>> REPLACE
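mock! hand-writes a mock for a struct together with a trait implementation, giving the mock the same API as the real type. The closest everyday analogue in Python is a unittest.mock object built with a spec, which is what the sketch below shows; it is only an analogy, not how mockall works internally:

from unittest import mock

class Boo:
    def boo(self):
        raise NotImplementedError

# spec=Boo restricts the mock to Boo's real interface, similar in spirit to
# generating a MockBoo whose methods mirror the mocked type.
mock_boo = mock.Mock(spec=Boo)
mock_boo.boo.return_value = None
mock_boo.boo()
mock_boo.boo.assert_called_once()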
facebook/fbthrift
31d7b90e30de5f90891e4a845f6704e4c13748df
thrift/lib/rust/src/dep_tests/cargo_thrift/thrift_build.rs
rust
apache-2.0
Update autocargo component on FBS:master

Summary: Automated component version update

Bump Schedule: https://www.internalfb.com/intern/msdk/bump/?schedule_fbid=342556550408072
Package: https://www.internalfb.com/intern/msdk/package/125803836415945/
Oncall Team: rust_foundation

NOTE: This build is expected to expire at 2022/10/11 06:06AM PDT

---------

New project source changes since last bump based on 9cc37ee9d47923093119b37d4f2d60de5a5e490f at 2021/09/21 01:03PM UTC:
| 2021/09/21 01:05PM -05 | generatedunixname89002005294178 | D31082067 | [MSDK] Update autocargo component on FBS:master |
| 2021/09/29 01:58PM PDT | vgao1996 | D31115820 (https://github.com/facebook/fbthrift/commit/5b708efab9c17ed9797de50b0495cfc94d9964f7) | [Rust] update rand and quickcheck |
| 2021/09/30 02:55PM PDT | jkeljo | D31284743 (https://github.com/facebook/fbthrift/commit/25587a6a95209b773a63b5060482a2c5995cca86) | [rust][third-party] Enable `unbounded_depth` feature for `serde_json` |
| 2021/10/08 11:43AM BST | krallin | D31471849 | autocargo: emit rerun-if-changed on the buildscript itself |

---------

build-break (bot commits are not reviewed by a human)

Reviewed By: farnz

Differential Revision: D31541798

fbshipit-source-id: 8e41a39d552c31a879c41b7d244e2caca047b7da
// @generated by autocargo use std::env; use std::fs; use std::path::Path; use thrift_compiler::Config; #[rustfmt::skip] fn main() { let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided"); let out_dir: &Path = out_dir.as_ref(); fs::write( out_dir.join("cratemap"), "test_thrift crate", ).expect("Failed to write cratemap"); let conf = { let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config"); let path_from_manifest_to_base: &Path = "../../../../../..".as_ref(); let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided"); let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref(); let base_path = cargo_manifest_dir .join(path_from_manifest_to_base) .canonicalize() .expect("Failed to canonicalize base_path"); conf.base_path(base_path); let options = "serde"; if !options.is_empty() { conf.options(options); } let include_srcs = vec![ ]; conf.include_srcs(include_srcs); conf }; conf .run(&[ "../test_thrift.thrift" ]) .expect("Failed while running thrift compilation"); }
// @generated by autocargo use std::env; use std::fs; use std::path::Path; use thrift_compiler::Config; #[rustfmt::skip] fn main() { // Rerun if this gets rewritten. println!("cargo:rerun-if-changed=thrift_build.rs"); let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided"); let out_dir: &Path = out_dir.as_ref(); fs::write( out_dir.join("cratemap"), "test_thrift crate", ).expect("Failed to write cratemap"); let conf = { let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config"); let path_from_manifest_to_base: &Path = "../../../../../..".as_ref(); let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided"); let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref(); let base_path = cargo_manifest_dir .join(path_from_manifest_to_base) .canonicalize() .expect("Failed to canonicalize base_path"); conf.base_path(base_path); let options = "serde"; if !options.is_empty() { conf.options(options); } let include_srcs = vec![ ]; conf.include_srcs(include_srcs); conf }; conf .run(&[ "../test_thrift.thrift" ]) .expect("Failed while running thrift compilation"); }
3
0
1
add_only
--- a/thrift/lib/rust/src/dep_tests/cargo_thrift/thrift_build.rs +++ b/thrift/lib/rust/src/dep_tests/cargo_thrift/thrift_build.rs @@ -9,2 +9,5 @@ fn main() { + // Rerun if this gets rewritten. + println!("cargo:rerun-if-changed=thrift_build.rs"); + let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
--- a/thrift/lib/rust/src/dep_tests/cargo_thrift/thrift_build.rs +++ b/thrift/lib/rust/src/dep_tests/cargo_thrift/thrift_build.rs @@ ... @@ fn main() { + // Rerun if this gets rewritten. + println!("cargo:rerun-if-changed=thrift_build.rs"); + let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
--- a/thrift/lib/rust/src/dep_tests/cargo_thrift/thrift_build.rs +++ b/thrift/lib/rust/src/dep_tests/cargo_thrift/thrift_build.rs @@ -9,2 +9,5 @@ CON fn main() { ADD // Rerun if this gets rewritten. ADD println!("cargo:rerun-if-changed=thrift_build.rs"); ADD CON let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
<<<<<<< SEARCH #[rustfmt::skip] fn main() { let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided"); let out_dir: &Path = out_dir.as_ref(); ======= #[rustfmt::skip] fn main() { // Rerun if this gets rewritten. println!("cargo:rerun-if-changed=thrift_build.rs"); let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided"); let out_dir: &Path = out_dir.as_ref(); >>>>>>> REPLACE
otovo/python-netsgiro
43a8a83014c2d77b37615f28e695fa861350d0bf
setup.py
python
apache-2.0
Add attrs and typing to deps
import re from setuptools import find_packages, setup with open('netsgiro/__init__.py') as fh: metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read())) with open('README.rst') as fh: long_description = fh.read() setup( name='netsgiro', version=metadata['version'], description='File parsers for Nets AvtaleGiro and OCR Giro', long_description=long_description, url='https://github.com/otovo/python-netsgiro', author='Otovo AS', license='Apache License, Version 2.0', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords='avtalegiro ocr giro', packages=find_packages(exclude=['tests', 'tests.*']), install_requires=[ ], extras_require={ 'dev': [ 'check-manifest', 'flake8', 'flake8-import-order', 'mypy', 'pytest', 'pytest-xdist', 'tox', ], }, )
import re from setuptools import find_packages, setup with open('netsgiro/__init__.py') as fh: metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read())) with open('README.rst') as fh: long_description = fh.read() setup( name='netsgiro', version=metadata['version'], description='File parsers for Nets AvtaleGiro and OCR Giro', long_description=long_description, url='https://github.com/otovo/python-netsgiro', author='Otovo AS', license='Apache License, Version 2.0', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], keywords='avtalegiro ocr giro', packages=find_packages(exclude=['tests', 'tests.*']), install_requires=[ 'attrs', 'typing', # Needed for Python 3.4 ], extras_require={ 'dev': [ 'check-manifest', 'flake8', 'flake8-import-order', 'mypy', 'pytest', 'pytest-xdist', 'tox', ], }, )
2
0
1
add_only
--- a/setup.py +++ b/setup.py @@ -33,2 +33,4 @@ install_requires=[ + 'attrs', + 'typing', # Needed for Python 3.4 ],
--- a/setup.py +++ b/setup.py @@ ... @@ install_requires=[ + 'attrs', + 'typing', # Needed for Python 3.4 ],
--- a/setup.py +++ b/setup.py @@ -33,2 +33,4 @@ CON install_requires=[ ADD 'attrs', ADD 'typing', # Needed for Python 3.4 CON ],
<<<<<<< SEARCH packages=find_packages(exclude=['tests', 'tests.*']), install_requires=[ ], extras_require={ ======= packages=find_packages(exclude=['tests', 'tests.*']), install_requires=[ 'attrs', 'typing', # Needed for Python 3.4 ], extras_require={ >>>>>>> REPLACE
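The two new runtime dependencies reflect how the library's record classes are written: attrs for the class boilerplate and typing for type hints, which on Python 3.4 only ship as a separate package. A hedged sketch of the kind of class that needs both; the class and field names are invented for illustration:

from typing import Optional

import attr

@attr.s
class Record:
    service_code = attr.ib()           # type: int
    reference = attr.ib(default=None)  # type: Optional[str]

print(Record(service_code=9))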
AleksanderMielczarek/Napkin
5cd4f1e2df23137d7eb9bf42e94a981d17f82678
napkin/src/main/java/com/github/aleksandermielczarek/napkin/Napkin.java
java
apache-2.0
Allow to retrieve component from View
package com.github.aleksandermielczarek.napkin; import android.content.Context; import android.support.v4.app.Fragment; /** * Created by Aleksander Mielczarek on 14.05.2016. */ public class Napkin { private Napkin() { } public static <T> T provideComponent(Object object) { ComponentProvider<T> componentProvider = (ComponentProvider<T>) object; return componentProvider.provideComponent(); } public static <T> T provideAppComponent(Context context) { Context application = context.getApplicationContext(); return provideComponent(application); } public static <T> T provideAppComponent(Fragment fragment) { Context application = fragment.getContext().getApplicationContext(); return provideComponent(application); } public static <T> T provideActivityComponent(Fragment fragment) { Context activity = fragment.getActivity(); return provideComponent(activity); } }
package com.github.aleksandermielczarek.napkin; import android.content.Context; import android.support.v4.app.Fragment; import android.view.View; /** * Created by Aleksander Mielczarek on 14.05.2016. */ public class Napkin { private Napkin() { } @SuppressWarnings("unchecked") public static <T> T provideComponent(Object object) { ComponentProvider<T> componentProvider = (ComponentProvider<T>) object; return componentProvider.provideComponent(); } public static <T> T provideAppComponent(Context context) { Context application = context.getApplicationContext(); return provideComponent(application); } public static <T> T provideAppComponent(Fragment fragment) { Context context = fragment.getContext(); return provideAppComponent(context); } public static <T> T provideAppComponent(View view) { Context context = view.getContext(); return provideAppComponent(context); } public static <T> T provideActivityComponent(Fragment fragment) { Context activity = fragment.getActivity(); return provideComponent(activity); } public static <T> T provideActivityComponent(View view) { Context activity = view.getContext(); return provideComponent(activity); } }
13
2
4
mixed
--- a/napkin/src/main/java/com/github/aleksandermielczarek/napkin/Napkin.java +++ b/napkin/src/main/java/com/github/aleksandermielczarek/napkin/Napkin.java @@ -4,2 +4,3 @@ import android.support.v4.app.Fragment; +import android.view.View; @@ -14,2 +15,3 @@ + @SuppressWarnings("unchecked") public static <T> T provideComponent(Object object) { @@ -25,4 +27,9 @@ public static <T> T provideAppComponent(Fragment fragment) { - Context application = fragment.getContext().getApplicationContext(); - return provideComponent(application); + Context context = fragment.getContext(); + return provideAppComponent(context); + } + + public static <T> T provideAppComponent(View view) { + Context context = view.getContext(); + return provideAppComponent(context); } @@ -34,2 +41,6 @@ + public static <T> T provideActivityComponent(View view) { + Context activity = view.getContext(); + return provideComponent(activity); + } }
--- a/napkin/src/main/java/com/github/aleksandermielczarek/napkin/Napkin.java +++ b/napkin/src/main/java/com/github/aleksandermielczarek/napkin/Napkin.java @@ ... @@ import android.support.v4.app.Fragment; +import android.view.View; @@ ... @@ + @SuppressWarnings("unchecked") public static <T> T provideComponent(Object object) { @@ ... @@ public static <T> T provideAppComponent(Fragment fragment) { - Context application = fragment.getContext().getApplicationContext(); - return provideComponent(application); + Context context = fragment.getContext(); + return provideAppComponent(context); + } + + public static <T> T provideAppComponent(View view) { + Context context = view.getContext(); + return provideAppComponent(context); } @@ ... @@ + public static <T> T provideActivityComponent(View view) { + Context activity = view.getContext(); + return provideComponent(activity); + } }
--- a/napkin/src/main/java/com/github/aleksandermielczarek/napkin/Napkin.java +++ b/napkin/src/main/java/com/github/aleksandermielczarek/napkin/Napkin.java @@ -4,2 +4,3 @@ CON import android.support.v4.app.Fragment; ADD import android.view.View; CON @@ -14,2 +15,3 @@ CON ADD @SuppressWarnings("unchecked") CON public static <T> T provideComponent(Object object) { @@ -25,4 +27,9 @@ CON public static <T> T provideAppComponent(Fragment fragment) { DEL Context application = fragment.getContext().getApplicationContext(); DEL return provideComponent(application); ADD Context context = fragment.getContext(); ADD return provideAppComponent(context); ADD } ADD ADD public static <T> T provideAppComponent(View view) { ADD Context context = view.getContext(); ADD return provideAppComponent(context); CON } @@ -34,2 +41,6 @@ CON ADD public static <T> T provideActivityComponent(View view) { ADD Context activity = view.getContext(); ADD return provideComponent(activity); ADD } CON }
<<<<<<< SEARCH import android.content.Context; import android.support.v4.app.Fragment; /** ======= import android.content.Context; import android.support.v4.app.Fragment; import android.view.View; /** >>>>>>> REPLACE <<<<<<< SEARCH } public static <T> T provideComponent(Object object) { ComponentProvider<T> componentProvider = (ComponentProvider<T>) object; ======= } @SuppressWarnings("unchecked") public static <T> T provideComponent(Object object) { ComponentProvider<T> componentProvider = (ComponentProvider<T>) object; >>>>>>> REPLACE <<<<<<< SEARCH public static <T> T provideAppComponent(Fragment fragment) { Context application = fragment.getContext().getApplicationContext(); return provideComponent(application); } ======= public static <T> T provideAppComponent(Fragment fragment) { Context context = fragment.getContext(); return provideAppComponent(context); } public static <T> T provideAppComponent(View view) { Context context = view.getContext(); return provideAppComponent(context); } >>>>>>> REPLACE <<<<<<< SEARCH } } ======= } public static <T> T provideActivityComponent(View view) { Context activity = view.getContext(); return provideComponent(activity); } } >>>>>>> REPLACE
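The Java change adds View overloads that simply extract a Context and delegate to the existing lookups. The same delegate-by-argument-type idea can be sketched in Python with functools.singledispatch; the classes below are stand-ins, not the library's code:

from functools import singledispatch

class Context:
    def provide_component(self):
        return "component"

class View:
    def __init__(self, context):
        self.context = context

@singledispatch
def provide_component(source):
    raise TypeError("unsupported source: %r" % (source,))

@provide_component.register(Context)
def _(context):
    return context.provide_component()

@provide_component.register(View)
def _(view):
    # Delegate to the Context-based lookup, mirroring view.getContext().
    return provide_component(view.context)

print(provide_component(View(Context())))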
adamcharnock/django-su
9c037ed3ebe7353b419562311bbc1f07875ab358
django_su/forms.py
python
mit
Update UserSuForm to enhance compatibility with custom user models. In custom user models, we cannot rely on there being a 'username' field. Instead, we should use whichever field has been specified as the username field.
# -*- coding: utf-8 -*- from django import forms from django.conf import settings from django.utils.translation import ugettext_lazy as _ from . import get_user_model class UserSuForm(forms.Form): user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( 'username'), required=True) # pylint: disable=W0212 use_ajax_select = False def __init__(self, *args, **kwargs): super(UserSuForm, self).__init__(*args, **kwargs) if 'ajax_select' in settings.INSTALLED_APPS and getattr( settings, 'AJAX_LOOKUP_CHANNELS', None): from ajax_select.fields import AutoCompleteSelectField lookup = settings.AJAX_LOOKUP_CHANNELS.get('django_su', None) if lookup is not None: old_field = self.fields['user'] self.fields['user'] = AutoCompleteSelectField( 'django_su', required=old_field.required, label=old_field.label, ) self.use_ajax_select = True def get_user(self): return self.cleaned_data.get('user', None) def __str__(self): if 'formadmin' in settings.INSTALLED_APPS: try: from formadmin.forms import as_django_admin return as_django_admin(self) except ImportError: pass return super(UserSuForm, self).__str__()
# -*- coding: utf-8 -*- from django import forms from django.conf import settings from django.utils.translation import ugettext_lazy as _ from . import get_user_model class UserSuForm(forms.Form): username_field = get_user_model().USERNAME_FIELD user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( username_field), required=True) # pylint: disable=W0212 use_ajax_select = False def __init__(self, *args, **kwargs): super(UserSuForm, self).__init__(*args, **kwargs) if 'ajax_select' in settings.INSTALLED_APPS and getattr( settings, 'AJAX_LOOKUP_CHANNELS', None): from ajax_select.fields import AutoCompleteSelectField lookup = settings.AJAX_LOOKUP_CHANNELS.get('django_su', None) if lookup is not None: old_field = self.fields['user'] self.fields['user'] = AutoCompleteSelectField( 'django_su', required=old_field.required, label=old_field.label, ) self.use_ajax_select = True def get_user(self): return self.cleaned_data.get('user', None) def __str__(self): if 'formadmin' in settings.INSTALLED_APPS: try: from formadmin.forms import as_django_admin return as_django_admin(self) except ImportError: pass return super(UserSuForm, self).__str__()
3
1
1
mixed
--- a/django_su/forms.py +++ b/django_su/forms.py @@ -11,5 +11,7 @@ + username_field = get_user_model().USERNAME_FIELD + user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( - 'username'), required=True) # pylint: disable=W0212 + username_field), required=True) # pylint: disable=W0212
--- a/django_su/forms.py +++ b/django_su/forms.py @@ ... @@ + username_field = get_user_model().USERNAME_FIELD + user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( - 'username'), required=True) # pylint: disable=W0212 + username_field), required=True) # pylint: disable=W0212
--- a/django_su/forms.py +++ b/django_su/forms.py @@ -11,5 +11,7 @@ CON ADD username_field = get_user_model().USERNAME_FIELD ADD CON user = forms.ModelChoiceField( CON label=_('Users'), queryset=get_user_model()._default_manager.order_by( DEL 'username'), required=True) # pylint: disable=W0212 ADD username_field), required=True) # pylint: disable=W0212 CON
<<<<<<< SEARCH class UserSuForm(forms.Form): user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( 'username'), required=True) # pylint: disable=W0212 use_ajax_select = False ======= class UserSuForm(forms.Form): username_field = get_user_model().USERNAME_FIELD user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( username_field), required=True) # pylint: disable=W0212 use_ajax_select = False >>>>>>> REPLACE
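The reason 'username' cannot be hard-coded is that a custom user model names its own identifying field through USERNAME_FIELD. A hedged sketch of such a model; it belongs in a Django app's models.py inside a configured project, and the app, class and field names are invented:

from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
from django.db import models

class EmailUser(AbstractBaseUser):
    email = models.EmailField(unique=True)

    USERNAME_FIELD = 'email'  # this model has no 'username' column at all

    objects = BaseUserManager()

# With AUTH_USER_MODEL pointing at this model, get_user_model().USERNAME_FIELD
# evaluates to 'email', so ordering by the username_field keeps working.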
AugustinLF/react-training
473d6d6e29d56f8ac54832240479558b2114ea1b
src/corrections/mainPracticeCorrection/components/dataFetcher.js
javascript
mit
Replace useState by useReducer in DataFetcher
import * as React from 'react'; import PropTypes from 'prop-types'; const DataFetcher = ({children, getData, parameters}) => { const [isLoading, setIsLoading] = React.useState(true); const [data, setData] = React.useState(null); const fetchData = parameters => { if (parameters === null) return; setIsLoading(true); getData(parameters).then(results => { setData(results); setIsLoading(false); }); }; React.useEffect( () => { fetchData(parameters); }, [parameters] ); return children({data, isLoading}); }; DataFetcher.propTypes = { children: PropTypes.func.isRequired, getData: PropTypes.func.isRequired, parameters: PropTypes.any, }; export default DataFetcher;
import * as React from 'react'; import PropTypes from 'prop-types'; const DataFetcher = ({children, getData, parameters}) => { const [state, dispatch] = React.useReducer( (state, action) => { switch (action.type) { case 'FETCH_DATA': return { isLoading: true, data: null, }; case 'SUCCESS': return { isLoading: false, data: action.payload, }; default: throw new Error('The reducer of DataFetch was called with the wrong action'); } }, { isLoading: true, data: null, } ); const fetchData = parameters => { if (parameters === null) return; dispatch({type: 'FETCH_DATA'}); getData(parameters).then(results => { dispatch({ type: 'SUCCESS', payload: results, }); }); }; React.useEffect( () => { fetchData(parameters); }, [parameters] ); return children({data: state.data, isLoading: state.isLoading}); }; DataFetcher.propTypes = { children: PropTypes.func.isRequired, getData: PropTypes.func.isRequired, parameters: PropTypes.any, }; export default DataFetcher;
28
6
3
mixed
--- a/src/corrections/mainPracticeCorrection/components/dataFetcher.js +++ b/src/corrections/mainPracticeCorrection/components/dataFetcher.js @@ -4,4 +4,24 @@ const DataFetcher = ({children, getData, parameters}) => { - const [isLoading, setIsLoading] = React.useState(true); - const [data, setData] = React.useState(null); + const [state, dispatch] = React.useReducer( + (state, action) => { + switch (action.type) { + case 'FETCH_DATA': + return { + isLoading: true, + data: null, + }; + case 'SUCCESS': + return { + isLoading: false, + data: action.payload, + }; + default: + throw new Error('The reducer of DataFetch was called with the wrong action'); + } + }, + { + isLoading: true, + data: null, + } + ); @@ -9,6 +29,8 @@ if (parameters === null) return; - setIsLoading(true); + dispatch({type: 'FETCH_DATA'}); getData(parameters).then(results => { - setData(results); - setIsLoading(false); + dispatch({ + type: 'SUCCESS', + payload: results, + }); }); @@ -23,3 +45,3 @@ - return children({data, isLoading}); + return children({data: state.data, isLoading: state.isLoading}); };
--- a/src/corrections/mainPracticeCorrection/components/dataFetcher.js +++ b/src/corrections/mainPracticeCorrection/components/dataFetcher.js @@ ... @@ const DataFetcher = ({children, getData, parameters}) => { - const [isLoading, setIsLoading] = React.useState(true); - const [data, setData] = React.useState(null); + const [state, dispatch] = React.useReducer( + (state, action) => { + switch (action.type) { + case 'FETCH_DATA': + return { + isLoading: true, + data: null, + }; + case 'SUCCESS': + return { + isLoading: false, + data: action.payload, + }; + default: + throw new Error('The reducer of DataFetch was called with the wrong action'); + } + }, + { + isLoading: true, + data: null, + } + ); @@ ... @@ if (parameters === null) return; - setIsLoading(true); + dispatch({type: 'FETCH_DATA'}); getData(parameters).then(results => { - setData(results); - setIsLoading(false); + dispatch({ + type: 'SUCCESS', + payload: results, + }); }); @@ ... @@ - return children({data, isLoading}); + return children({data: state.data, isLoading: state.isLoading}); };
--- a/src/corrections/mainPracticeCorrection/components/dataFetcher.js +++ b/src/corrections/mainPracticeCorrection/components/dataFetcher.js @@ -4,4 +4,24 @@ CON const DataFetcher = ({children, getData, parameters}) => { DEL const [isLoading, setIsLoading] = React.useState(true); DEL const [data, setData] = React.useState(null); ADD const [state, dispatch] = React.useReducer( ADD (state, action) => { ADD switch (action.type) { ADD case 'FETCH_DATA': ADD return { ADD isLoading: true, ADD data: null, ADD }; ADD case 'SUCCESS': ADD return { ADD isLoading: false, ADD data: action.payload, ADD }; ADD default: ADD throw new Error('The reducer of DataFetch was called with the wrong action'); ADD } ADD }, ADD { ADD isLoading: true, ADD data: null, ADD } ADD ); CON @@ -9,6 +29,8 @@ CON if (parameters === null) return; DEL setIsLoading(true); ADD dispatch({type: 'FETCH_DATA'}); CON getData(parameters).then(results => { DEL setData(results); DEL setIsLoading(false); ADD dispatch({ ADD type: 'SUCCESS', ADD payload: results, ADD }); CON }); @@ -23,3 +45,3 @@ CON DEL return children({data, isLoading}); ADD return children({data: state.data, isLoading: state.isLoading}); CON };
<<<<<<< SEARCH const DataFetcher = ({children, getData, parameters}) => { const [isLoading, setIsLoading] = React.useState(true); const [data, setData] = React.useState(null); const fetchData = parameters => { if (parameters === null) return; setIsLoading(true); getData(parameters).then(results => { setData(results); setIsLoading(false); }); }; ======= const DataFetcher = ({children, getData, parameters}) => { const [state, dispatch] = React.useReducer( (state, action) => { switch (action.type) { case 'FETCH_DATA': return { isLoading: true, data: null, }; case 'SUCCESS': return { isLoading: false, data: action.payload, }; default: throw new Error('The reducer of DataFetch was called with the wrong action'); } }, { isLoading: true, data: null, } ); const fetchData = parameters => { if (parameters === null) return; dispatch({type: 'FETCH_DATA'}); getData(parameters).then(results => { dispatch({ type: 'SUCCESS', payload: results, }); }); }; >>>>>>> REPLACE <<<<<<< SEARCH ); return children({data, isLoading}); }; ======= ); return children({data: state.data, isLoading: state.isLoading}); }; >>>>>>> REPLACE
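The reducer introduced above is a pure function from (state, action) to the next state, so its transitions can be exercised outside any UI framework. A small Python transcription of the same reducer, using dicts for both state and actions to mirror the JavaScript shape:

def data_fetcher_reducer(state, action):
    if action["type"] == "FETCH_DATA":
        return {"isLoading": True, "data": None}
    if action["type"] == "SUCCESS":
        return {"isLoading": False, "data": action["payload"]}
    raise ValueError("The reducer of DataFetch was called with the wrong action")

state = {"isLoading": True, "data": None}
state = data_fetcher_reducer(state, {"type": "FETCH_DATA"})
state = data_fetcher_reducer(state, {"type": "SUCCESS", "payload": [1, 2, 3]})
print(state)  # {'isLoading': False, 'data': [1, 2, 3]}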
toomasr/sgf4j-gui
cc8a30b5422fbc9d0db438ef9bc497686577bdc8
src/main/java/com/toomasr/sgf4j/filetree/FileFormatCell.java
java
apache-2.0
Use new icons in the tree
package com.toomasr.sgf4j.filetree; import java.io.File; import javafx.scene.control.TreeCell; public class FileFormatCell extends TreeCell<File> { public FileFormatCell() { super(); } protected void updateItem(File item, boolean empty) { super.updateItem(item, empty); if (empty || item == null) { setText(null); setGraphic(null); } else { /* * For a root device on a Mac getName will return an empty string * but actually I'd like to show a slash designating the root device * */ if ("".equals(item.getName())) setText(item.getAbsolutePath()); else setText(item.getName()); } } }
package com.toomasr.sgf4j.filetree; import java.io.File; import com.toomasr.sgf4j.metasystem.MetaSystem; import com.toomasr.sgf4j.metasystem.ProblemStatus; import javafx.scene.control.TreeCell; import javafx.scene.image.Image; import javafx.scene.image.ImageView; public class FileFormatCell extends TreeCell<File> { private Image noneImage = new Image(getClass().getResourceAsStream("/icons/none_16x16.png")); private Image failedImage = new Image(getClass().getResourceAsStream("/icons/failed_16x16.png")); private Image solvedImage = new Image(getClass().getResourceAsStream("/icons/solved_16x16.png")); public FileFormatCell() { super(); } protected void updateItem(File file, boolean empty) { super.updateItem(file, empty); if (empty || file == null) { setText(null); setGraphic(null); } else { /* * For a root device on a Mac getName will return an empty string * but actually I'd like to show a slash designating the root device * */ if ("".equals(file.getName())) setText(file.getAbsolutePath()); /* * For SGF files we want to show some custom icons. */ else if (file != null && file.isFile() && file.toString().toLowerCase().endsWith("sgf")) { setText(file.getName()); if (MetaSystem.systemExists(file.toPath())) { ProblemStatus status = MetaSystem.getStatus(file.toPath()); if (status == ProblemStatus.NONE) setGraphic(new ImageView(noneImage)); else if (status == ProblemStatus.FAIL) setGraphic(new ImageView(failedImage)); else setGraphic(new ImageView(solvedImage)); } else { setGraphic(null); } } else { setText(file.getName()); setGraphic(null); } } } }
37
7
3
mixed
--- a/src/main/java/com/toomasr/sgf4j/filetree/FileFormatCell.java +++ b/src/main/java/com/toomasr/sgf4j/filetree/FileFormatCell.java @@ -4,5 +4,14 @@ +import com.toomasr.sgf4j.metasystem.MetaSystem; +import com.toomasr.sgf4j.metasystem.ProblemStatus; + import javafx.scene.control.TreeCell; +import javafx.scene.image.Image; +import javafx.scene.image.ImageView; public class FileFormatCell extends TreeCell<File> { + private Image noneImage = new Image(getClass().getResourceAsStream("/icons/none_16x16.png")); + private Image failedImage = new Image(getClass().getResourceAsStream("/icons/failed_16x16.png")); + private Image solvedImage = new Image(getClass().getResourceAsStream("/icons/solved_16x16.png")); + public FileFormatCell() { @@ -11,5 +20,5 @@ - protected void updateItem(File item, boolean empty) { - super.updateItem(item, empty); - if (empty || item == null) { + protected void updateItem(File file, boolean empty) { + super.updateItem(file, empty); + if (empty || file == null) { setText(null); @@ -23,6 +32,27 @@ */ - if ("".equals(item.getName())) - setText(item.getAbsolutePath()); - else - setText(item.getName()); + if ("".equals(file.getName())) + setText(file.getAbsolutePath()); + /* + * For SGF files we want to show some custom icons. + */ + else if (file != null && file.isFile() && file.toString().toLowerCase().endsWith("sgf")) { + setText(file.getName()); + if (MetaSystem.systemExists(file.toPath())) { + ProblemStatus status = MetaSystem.getStatus(file.toPath()); + + if (status == ProblemStatus.NONE) + setGraphic(new ImageView(noneImage)); + else if (status == ProblemStatus.FAIL) + setGraphic(new ImageView(failedImage)); + else + setGraphic(new ImageView(solvedImage)); + } + else { + setGraphic(null); + } + } + else { + setText(file.getName()); + setGraphic(null); + } }
--- a/src/main/java/com/toomasr/sgf4j/filetree/FileFormatCell.java +++ b/src/main/java/com/toomasr/sgf4j/filetree/FileFormatCell.java @@ ... @@ +import com.toomasr.sgf4j.metasystem.MetaSystem; +import com.toomasr.sgf4j.metasystem.ProblemStatus; + import javafx.scene.control.TreeCell; +import javafx.scene.image.Image; +import javafx.scene.image.ImageView; public class FileFormatCell extends TreeCell<File> { + private Image noneImage = new Image(getClass().getResourceAsStream("/icons/none_16x16.png")); + private Image failedImage = new Image(getClass().getResourceAsStream("/icons/failed_16x16.png")); + private Image solvedImage = new Image(getClass().getResourceAsStream("/icons/solved_16x16.png")); + public FileFormatCell() { @@ ... @@ - protected void updateItem(File item, boolean empty) { - super.updateItem(item, empty); - if (empty || item == null) { + protected void updateItem(File file, boolean empty) { + super.updateItem(file, empty); + if (empty || file == null) { setText(null); @@ ... @@ */ - if ("".equals(item.getName())) - setText(item.getAbsolutePath()); - else - setText(item.getName()); + if ("".equals(file.getName())) + setText(file.getAbsolutePath()); + /* + * For SGF files we want to show some custom icons. + */ + else if (file != null && file.isFile() && file.toString().toLowerCase().endsWith("sgf")) { + setText(file.getName()); + if (MetaSystem.systemExists(file.toPath())) { + ProblemStatus status = MetaSystem.getStatus(file.toPath()); + + if (status == ProblemStatus.NONE) + setGraphic(new ImageView(noneImage)); + else if (status == ProblemStatus.FAIL) + setGraphic(new ImageView(failedImage)); + else + setGraphic(new ImageView(solvedImage)); + } + else { + setGraphic(null); + } + } + else { + setText(file.getName()); + setGraphic(null); + } }
--- a/src/main/java/com/toomasr/sgf4j/filetree/FileFormatCell.java +++ b/src/main/java/com/toomasr/sgf4j/filetree/FileFormatCell.java @@ -4,5 +4,14 @@ CON ADD import com.toomasr.sgf4j.metasystem.MetaSystem; ADD import com.toomasr.sgf4j.metasystem.ProblemStatus; ADD CON import javafx.scene.control.TreeCell; ADD import javafx.scene.image.Image; ADD import javafx.scene.image.ImageView; CON CON public class FileFormatCell extends TreeCell<File> { ADD private Image noneImage = new Image(getClass().getResourceAsStream("/icons/none_16x16.png")); ADD private Image failedImage = new Image(getClass().getResourceAsStream("/icons/failed_16x16.png")); ADD private Image solvedImage = new Image(getClass().getResourceAsStream("/icons/solved_16x16.png")); ADD CON public FileFormatCell() { @@ -11,5 +20,5 @@ CON DEL protected void updateItem(File item, boolean empty) { DEL super.updateItem(item, empty); DEL if (empty || item == null) { ADD protected void updateItem(File file, boolean empty) { ADD super.updateItem(file, empty); ADD if (empty || file == null) { CON setText(null); @@ -23,6 +32,27 @@ CON */ DEL if ("".equals(item.getName())) DEL setText(item.getAbsolutePath()); DEL else DEL setText(item.getName()); ADD if ("".equals(file.getName())) ADD setText(file.getAbsolutePath()); ADD /* ADD * For SGF files we want to show some custom icons. ADD */ ADD else if (file != null && file.isFile() && file.toString().toLowerCase().endsWith("sgf")) { ADD setText(file.getName()); ADD if (MetaSystem.systemExists(file.toPath())) { ADD ProblemStatus status = MetaSystem.getStatus(file.toPath()); ADD ADD if (status == ProblemStatus.NONE) ADD setGraphic(new ImageView(noneImage)); ADD else if (status == ProblemStatus.FAIL) ADD setGraphic(new ImageView(failedImage)); ADD else ADD setGraphic(new ImageView(solvedImage)); ADD } ADD else { ADD setGraphic(null); ADD } ADD } ADD else { ADD setText(file.getName()); ADD setGraphic(null); ADD } CON }
<<<<<<< SEARCH import java.io.File; import javafx.scene.control.TreeCell; public class FileFormatCell extends TreeCell<File> { public FileFormatCell() { super(); } protected void updateItem(File item, boolean empty) { super.updateItem(item, empty); if (empty || item == null) { setText(null); setGraphic(null); ======= import java.io.File; import com.toomasr.sgf4j.metasystem.MetaSystem; import com.toomasr.sgf4j.metasystem.ProblemStatus; import javafx.scene.control.TreeCell; import javafx.scene.image.Image; import javafx.scene.image.ImageView; public class FileFormatCell extends TreeCell<File> { private Image noneImage = new Image(getClass().getResourceAsStream("/icons/none_16x16.png")); private Image failedImage = new Image(getClass().getResourceAsStream("/icons/failed_16x16.png")); private Image solvedImage = new Image(getClass().getResourceAsStream("/icons/solved_16x16.png")); public FileFormatCell() { super(); } protected void updateItem(File file, boolean empty) { super.updateItem(file, empty); if (empty || file == null) { setText(null); setGraphic(null); >>>>>>> REPLACE <<<<<<< SEARCH * */ if ("".equals(item.getName())) setText(item.getAbsolutePath()); else setText(item.getName()); } } ======= * */ if ("".equals(file.getName())) setText(file.getAbsolutePath()); /* * For SGF files we want to show some custom icons. */ else if (file != null && file.isFile() && file.toString().toLowerCase().endsWith("sgf")) { setText(file.getName()); if (MetaSystem.systemExists(file.toPath())) { ProblemStatus status = MetaSystem.getStatus(file.toPath()); if (status == ProblemStatus.NONE) setGraphic(new ImageView(noneImage)); else if (status == ProblemStatus.FAIL) setGraphic(new ImageView(failedImage)); else setGraphic(new ImageView(solvedImage)); } else { setGraphic(null); } } else { setText(file.getName()); setGraphic(null); } } } >>>>>>> REPLACE
gradle/gradle
fde3eca847096c8255d0a582fa662285c9ef231b
subprojects/files/files.gradle.kts
kotlin
apache-2.0
Remove redundant declaration of testFixturesImplementation
/*
 * Copyright 2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import org.gradle.gradlebuild.unittestandcompile.ModuleType

plugins {
    `java-library`
    // Some cycles have been inherited from the time these classes were in :core
    // id("gradlebuild.classycle")
}

dependencies {
    api(project(":baseServices"))
    api(project(":baseServicesGroovy"))
    api(project(":coreApi"))
    api(project(":modelCore"))
    api(library("guava"))
    api(library("jsr305"))
    api(library("inject"))

    implementation(project(":logging"))
    implementation(project(":native"))
    implementation(library("commons_io"))
    implementation(library("commons_lang"))

    testImplementation(project(":internalTesting"))

    val testFixturesImplementation by configurations
    testFixturesImplementation(project(":internalTesting"))
}

java {
    gradlebuildJava {
        moduleType = ModuleType.ENTRY_POINT
    }
}

testFixtures {
    from(":core")
    from(":coreApi")
}
/*
 * Copyright 2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import org.gradle.gradlebuild.unittestandcompile.ModuleType

plugins {
    `java-library`
    // Some cycles have been inherited from the time these classes were in :core
    // id("gradlebuild.classycle")
}

dependencies {
    api(project(":baseServices"))
    api(project(":baseServicesGroovy"))
    api(project(":coreApi"))
    api(project(":modelCore"))
    api(library("guava"))
    api(library("jsr305"))
    api(library("inject"))

    implementation(project(":logging"))
    implementation(project(":native"))
    implementation(library("commons_io"))
    implementation(library("commons_lang"))

    testImplementation(project(":internalTesting"))
    testFixturesImplementation(project(":internalTesting"))
}

java {
    gradlebuildJava {
        moduleType = ModuleType.ENTRY_POINT
    }
}

testFixtures {
    from(":core")
    from(":coreApi")
}
0
2
1
del_only
--- a/subprojects/files/files.gradle.kts +++ b/subprojects/files/files.gradle.kts @@ -38,4 +38,2 @@ testImplementation(project(":internalTesting")) - - val testFixturesImplementation by configurations testFixturesImplementation(project(":internalTesting"))
--- a/subprojects/files/files.gradle.kts +++ b/subprojects/files/files.gradle.kts @@ ... @@ testImplementation(project(":internalTesting")) - - val testFixturesImplementation by configurations testFixturesImplementation(project(":internalTesting"))
--- a/subprojects/files/files.gradle.kts +++ b/subprojects/files/files.gradle.kts @@ -38,4 +38,2 @@ CON testImplementation(project(":internalTesting")) DEL DEL val testFixturesImplementation by configurations CON testFixturesImplementation(project(":internalTesting"))
<<<<<<< SEARCH testImplementation(project(":internalTesting")) val testFixturesImplementation by configurations testFixturesImplementation(project(":internalTesting")) } ======= testImplementation(project(":internalTesting")) testFixturesImplementation(project(":internalTesting")) } >>>>>>> REPLACE
opengeogroep/safetymaps-server
40815bdf4e41cc035fe8eb915ef2fa2126b25935
src/main/java/nl/opengeogroep/safetymaps/server/stripes/LoginActionBean.java
java
agpl-3.0
Add returnTo param to normal login request
package nl.opengeogroep.safetymaps.server.stripes;

import net.sourceforge.stripes.action.ActionBean;
import net.sourceforge.stripes.action.ActionBeanContext;
import net.sourceforge.stripes.action.Resolution;
import net.sourceforge.stripes.action.StreamingResolution;
import net.sourceforge.stripes.action.UrlBinding;

/**
 *
 * @author matthijsln
 */
@UrlBinding("/viewer/api/login")
public class LoginActionBean implements ActionBean {
    private ActionBeanContext context;

    @Override
    public ActionBeanContext getContext() {
        return context;
    }

    @Override
    public void setContext(ActionBeanContext context) {
        this.context = context;
    }

    public Resolution redirect() {
        return new StreamingResolution("text/html",
                "<html><head>" +
                "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + "/viewer/\">" +
                "</head></html>"
        );
    }
}
package nl.opengeogroep.safetymaps.server.stripes;

import net.sourceforge.stripes.action.ActionBean;
import net.sourceforge.stripes.action.ActionBeanContext;
import net.sourceforge.stripes.action.Resolution;
import net.sourceforge.stripes.action.StreamingResolution;
import net.sourceforge.stripes.action.UrlBinding;

/**
 *
 * @author matthijsln
 */
@UrlBinding("/viewer/api/login")
public class LoginActionBean implements ActionBean {
    private ActionBeanContext context;

    @Override
    public ActionBeanContext getContext() {
        return context;
    }

    @Override
    public void setContext(ActionBeanContext context) {
        this.context = context;
    }

    public Resolution redirect() {
        String returnTo = request.getParameter("returnTo");

        if (returnTo == null || returnTo.length() == 0) {
            returnTo = "/viewer/";
        }

        return new StreamingResolution("text/html",
                "<html><head>" +
                "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + returnTo + "\">" +
                "</head></html>"
        );
    }
}
7
1
1
mixed
--- a/src/main/java/nl/opengeogroep/safetymaps/server/stripes/LoginActionBean.java +++ b/src/main/java/nl/opengeogroep/safetymaps/server/stripes/LoginActionBean.java @@ -27,5 +27,11 @@ public Resolution redirect() { + String returnTo = request.getParameter("returnTo"); + + if (returnTo == null || returnTo.length() == 0) { + returnTo = "/viewer/"; + } + return new StreamingResolution("text/html", "<html><head>" + - "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + "/viewer/\">" + + "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + returnTo + "\">" + "</head></html>"
--- a/src/main/java/nl/opengeogroep/safetymaps/server/stripes/LoginActionBean.java +++ b/src/main/java/nl/opengeogroep/safetymaps/server/stripes/LoginActionBean.java @@ ... @@ public Resolution redirect() { + String returnTo = request.getParameter("returnTo"); + + if (returnTo == null || returnTo.length() == 0) { + returnTo = "/viewer/"; + } + return new StreamingResolution("text/html", "<html><head>" + - "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + "/viewer/\">" + + "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + returnTo + "\">" + "</head></html>"
--- a/src/main/java/nl/opengeogroep/safetymaps/server/stripes/LoginActionBean.java +++ b/src/main/java/nl/opengeogroep/safetymaps/server/stripes/LoginActionBean.java @@ -27,5 +27,11 @@ CON public Resolution redirect() { ADD String returnTo = request.getParameter("returnTo"); ADD ADD if (returnTo == null || returnTo.length() == 0) { ADD returnTo = "/viewer/"; ADD } ADD CON return new StreamingResolution("text/html", CON "<html><head>" + DEL "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + "/viewer/\">" + ADD "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + returnTo + "\">" + CON "</head></html>"
<<<<<<< SEARCH public Resolution redirect() { return new StreamingResolution("text/html", "<html><head>" + "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + "/viewer/\">" + "</head></html>" ); ======= public Resolution redirect() { String returnTo = request.getParameter("returnTo"); if (returnTo == null || returnTo.length() == 0) { returnTo = "/viewer/"; } return new StreamingResolution("text/html", "<html><head>" + "<meta http-equiv=\"refresh\" content=\"0;url=" + context.getRequest().getContextPath() + returnTo + "\">" + "</head></html>" ); >>>>>>> REPLACE
SICU-Stress-Measurement-System/frontend-java
a5414380529705ae19e5332fa9fd32ede3290910
src/main/java/edu/cwru/sicu_sms/Main.java
java
apache-2.0
Revise flow of 'start' method
/* *\
** SICU Stress Measurement System **
** Project P04 | C380 Team A **
** EBME 380: Biomedical Engineering Design Experience **
** Case Western Reserve University **
** 2016 Fall Semester **
\* */

package edu.cwru.sicu_sms;

import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.stage.Stage;

/**
 * The main stage of the front-end program for interfacing with the electronic system.
 *
 * @since September 27, 2016
 * @author Ted Frohlich <[email protected]>
 * @author Abby Walker <[email protected]>
 */
public class Main extends Application {

    @Override
    public void start(Stage primaryStage) throws Exception {
        Parent root = FXMLLoader.load(
                getClass().getResource("scene/sicu_sms.fxml")
        );
        primaryStage.getIcons().add(new Image(
                "http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg"));
        primaryStage.setTitle("SICU Stress Measurement System");
        primaryStage.setScene(new Scene(root));
        primaryStage.setMaximized(true);
        primaryStage.show();
    }

    public static void main(String[] args) {
        launch(args);
    }
}
/* *\
** SICU Stress Measurement System **
** Project P04 | C380 Team A **
** EBME 380: Biomedical Engineering Design Experience **
** Case Western Reserve University **
** 2016 Fall Semester **
\* */

package edu.cwru.sicu_sms;

import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.stage.Stage;

/**
 * The main stage of the front-end program for interfacing with the electronic system.
 *
 * @since September 27, 2016
 * @author Ted Frohlich <[email protected]>
 * @author Abby Walker <[email protected]>
 */
public class Main extends Application {

    @Override
    public void start(Stage primaryStage) throws Exception {
        primaryStage.setScene(new Scene(
                FXMLLoader.load(getClass().getResource("scene/sicu_sms.fxml"))));

        primaryStage.setTitle("SICU Stress Measurement System");
        primaryStage.getIcons()
                .add(new Image("http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg"));

        primaryStage.setMaximized(true);
        primaryStage.show();
    }

    public static void main(String[] args) {
        launch(args);
    }
}
6
7
2
mixed
--- a/src/main/java/edu/cwru/sicu_sms/Main.java +++ b/src/main/java/edu/cwru/sicu_sms/Main.java @@ -12,3 +12,2 @@ import javafx.fxml.FXMLLoader; -import javafx.scene.Parent; import javafx.scene.Scene; @@ -28,9 +27,9 @@ public void start(Stage primaryStage) throws Exception { - Parent root = FXMLLoader.load( - getClass().getResource("scene/sicu_sms.fxml") - ); - primaryStage.getIcons().add(new Image( - "http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); + primaryStage.setScene(new Scene( + FXMLLoader.load(getClass().getResource("scene/sicu_sms.fxml")))); + primaryStage.setTitle("SICU Stress Measurement System"); - primaryStage.setScene(new Scene(root)); + primaryStage.getIcons() + .add(new Image("http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); + primaryStage.setMaximized(true);
--- a/src/main/java/edu/cwru/sicu_sms/Main.java +++ b/src/main/java/edu/cwru/sicu_sms/Main.java @@ ... @@ import javafx.fxml.FXMLLoader; -import javafx.scene.Parent; import javafx.scene.Scene; @@ ... @@ public void start(Stage primaryStage) throws Exception { - Parent root = FXMLLoader.load( - getClass().getResource("scene/sicu_sms.fxml") - ); - primaryStage.getIcons().add(new Image( - "http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); + primaryStage.setScene(new Scene( + FXMLLoader.load(getClass().getResource("scene/sicu_sms.fxml")))); + primaryStage.setTitle("SICU Stress Measurement System"); - primaryStage.setScene(new Scene(root)); + primaryStage.getIcons() + .add(new Image("http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); + primaryStage.setMaximized(true);
--- a/src/main/java/edu/cwru/sicu_sms/Main.java +++ b/src/main/java/edu/cwru/sicu_sms/Main.java @@ -12,3 +12,2 @@ CON import javafx.fxml.FXMLLoader; DEL import javafx.scene.Parent; CON import javafx.scene.Scene; @@ -28,9 +27,9 @@ CON public void start(Stage primaryStage) throws Exception { DEL Parent root = FXMLLoader.load( DEL getClass().getResource("scene/sicu_sms.fxml") DEL ); DEL primaryStage.getIcons().add(new Image( DEL "http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); ADD primaryStage.setScene(new Scene( ADD FXMLLoader.load(getClass().getResource("scene/sicu_sms.fxml")))); ADD CON primaryStage.setTitle("SICU Stress Measurement System"); DEL primaryStage.setScene(new Scene(root)); ADD primaryStage.getIcons() ADD .add(new Image("http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); ADD CON primaryStage.setMaximized(true);
<<<<<<< SEARCH import javafx.application.Application; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.image.Image; ======= import javafx.application.Application; import javafx.fxml.FXMLLoader; import javafx.scene.Scene; import javafx.scene.image.Image; >>>>>>> REPLACE <<<<<<< SEARCH @Override public void start(Stage primaryStage) throws Exception { Parent root = FXMLLoader.load( getClass().getResource("scene/sicu_sms.fxml") ); primaryStage.getIcons().add(new Image( "http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); primaryStage.setTitle("SICU Stress Measurement System"); primaryStage.setScene(new Scene(root)); primaryStage.setMaximized(true); primaryStage.show(); ======= @Override public void start(Stage primaryStage) throws Exception { primaryStage.setScene(new Scene( FXMLLoader.load(getClass().getResource("scene/sicu_sms.fxml")))); primaryStage.setTitle("SICU Stress Measurement System"); primaryStage.getIcons() .add(new Image("http://s3.amazonaws.com/libapps/customers/558/images/CWRU_Logo.jpg")); primaryStage.setMaximized(true); primaryStage.show(); >>>>>>> REPLACE
burtonageo/pascal_string
12fb91bafee024b3775c11eaa39cf348c4d7db11
src/utf8/mod.rs
rust
mit
Add some tests for utf8 string
#![allow(missing_docs, unused_variables)]

mod pascal_str;
mod pascal_string;

pub use self::pascal_str::{Chars, Bytes, Lines, PascalStr};
pub use self::pascal_string::PascalString;

#[cfg(test)]
mod tests {
}
#![allow(missing_docs, unused_variables)]

mod pascal_str;
mod pascal_string;

pub use self::pascal_str::{Chars, Bytes, Lines, PascalStr};
pub use self::pascal_string::PascalString;

#[cfg(test)]
mod tests {
    use super::*;
    use std::borrow::Cow;
    use std::ffi::{CStr, CString};
    use std::iter::IntoIterator;

    /*
    #[test]
    fn test_string_creation() {
        let test = "Hello, my world!".to_owned();
        let test_pascal = PascalString::from(&test).unwrap();
        assert_eq!(&test, test_pascal.as_str());

        let too_many_bytes = vec![12u8; 256];
        assert!(match PascalString::from(&too_many_bytes) {
            Err(PascalStringCreateError::InputTooLong) => true,
            _ => false
        });
    }
    */

    #[test]
    fn test_character_append() {
        let mut string = PascalString::new();
        assert!(string.try_push('h').is_ok());
        string.push('e');
        string.push(76u8 as char);
        string.push('l');
        string.push('L');
        string.push('o');
        string.push('\0');

        assert_eq!(string.as_str(), "heLlLo\0");
    }
}
33
0
1
add_only
--- a/src/utf8/mod.rs +++ b/src/utf8/mod.rs @@ -10,2 +10,35 @@ mod tests { + use super::*; + use std::borrow::Cow; + use std::ffi::{CStr, CString}; + use std::iter::IntoIterator; + + /* + #[test] + fn test_string_creation() { + let test = "Hello, my world!".to_owned(); + let test_pascal = PascalString::from(&test).unwrap(); + assert_eq!(&test, test_pascal.as_str()); + + let too_many_bytes = vec![12u8; 256]; + assert!(match PascalString::from(&too_many_bytes) { + Err(PascalStringCreateError::InputTooLong) => true, + _ => false + }); + } + */ + + #[test] + fn test_character_append() { + let mut string = PascalString::new(); + assert!(string.try_push('h').is_ok()); + string.push('e'); + string.push(76u8 as char); + string.push('l'); + string.push('L'); + string.push('o'); + string.push('\0'); + + assert_eq!(string.as_str(), "heLlLo\0"); + } }
--- a/src/utf8/mod.rs +++ b/src/utf8/mod.rs @@ ... @@ mod tests { + use super::*; + use std::borrow::Cow; + use std::ffi::{CStr, CString}; + use std::iter::IntoIterator; + + /* + #[test] + fn test_string_creation() { + let test = "Hello, my world!".to_owned(); + let test_pascal = PascalString::from(&test).unwrap(); + assert_eq!(&test, test_pascal.as_str()); + + let too_many_bytes = vec![12u8; 256]; + assert!(match PascalString::from(&too_many_bytes) { + Err(PascalStringCreateError::InputTooLong) => true, + _ => false + }); + } + */ + + #[test] + fn test_character_append() { + let mut string = PascalString::new(); + assert!(string.try_push('h').is_ok()); + string.push('e'); + string.push(76u8 as char); + string.push('l'); + string.push('L'); + string.push('o'); + string.push('\0'); + + assert_eq!(string.as_str(), "heLlLo\0"); + } }
--- a/src/utf8/mod.rs +++ b/src/utf8/mod.rs @@ -10,2 +10,35 @@ CON mod tests { ADD use super::*; ADD use std::borrow::Cow; ADD use std::ffi::{CStr, CString}; ADD use std::iter::IntoIterator; ADD ADD /* ADD #[test] ADD fn test_string_creation() { ADD let test = "Hello, my world!".to_owned(); ADD let test_pascal = PascalString::from(&test).unwrap(); ADD assert_eq!(&test, test_pascal.as_str()); ADD ADD let too_many_bytes = vec![12u8; 256]; ADD assert!(match PascalString::from(&too_many_bytes) { ADD Err(PascalStringCreateError::InputTooLong) => true, ADD _ => false ADD }); ADD } ADD */ ADD ADD #[test] ADD fn test_character_append() { ADD let mut string = PascalString::new(); ADD assert!(string.try_push('h').is_ok()); ADD string.push('e'); ADD string.push(76u8 as char); ADD string.push('l'); ADD string.push('L'); ADD string.push('o'); ADD string.push('\0'); ADD ADD assert_eq!(string.as_str(), "heLlLo\0"); ADD } CON }
<<<<<<< SEARCH #[cfg(test)] mod tests { } ======= #[cfg(test)] mod tests { use super::*; use std::borrow::Cow; use std::ffi::{CStr, CString}; use std::iter::IntoIterator; /* #[test] fn test_string_creation() { let test = "Hello, my world!".to_owned(); let test_pascal = PascalString::from(&test).unwrap(); assert_eq!(&test, test_pascal.as_str()); let too_many_bytes = vec![12u8; 256]; assert!(match PascalString::from(&too_many_bytes) { Err(PascalStringCreateError::InputTooLong) => true, _ => false }); } */ #[test] fn test_character_append() { let mut string = PascalString::new(); assert!(string.try_push('h').is_ok()); string.push('e'); string.push(76u8 as char); string.push('l'); string.push('L'); string.push('o'); string.push('\0'); assert_eq!(string.as_str(), "heLlLo\0"); } } >>>>>>> REPLACE
netroby/gerrit
9e8343c1350d8cfd1e1b46ff327d55958fd2eafa
gerrit-extension-api/src/main/java/com/google/gerrit/extensions/events/GarbageCollectorListener.java
java
apache-2.0
Fix @see to external class in api to allow compilation with Java 8

The recently added GarbageCollectorListener came with a @see to a JGit class. While Java 7 did not error out on that, Java 8 does:

  [...]/events/GarbageCollectorListener.java:34: error: reference not found
   * @see org.eclipse.jgit.api.GarbageCollectCommand#call()
     ^

As Java 8's javadoc currently (1.8.0_45) even complains when adding a “-link”, we work around the issue by transforming the @see to the classname into a @see with a full <a/>.

Change-Id: I1c061d5219b4fe8818d3fca8a56b4a433aa557d4
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.extensions.events;

import com.google.gerrit.extensions.annotations.ExtensionPoint;

import java.util.Properties;

/**
 * Notified whenever the garbage collector has run successfully on a project.
 */
@ExtensionPoint
public interface GarbageCollectorListener {
  public interface Event {
    /** @return The name of the project that has been garbage collected. */
    String getProjectName();

    /**
     * Properties describing the result of the garbage collection performed by
     * JGit
     *
     * @see org.eclipse.jgit.api.GarbageCollectCommand#call()
     */
    Properties getStatistics();
  }

  void onGarbageCollected(Event event);
}
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.extensions.events;

import com.google.gerrit.extensions.annotations.ExtensionPoint;

import java.util.Properties;

/**
 * Notified whenever the garbage collector has run successfully on a project.
 */
@ExtensionPoint
public interface GarbageCollectorListener {
  public interface Event {
    /** @return The name of the project that has been garbage collected. */
    String getProjectName();

    /**
     * Properties describing the result of the garbage collection performed by
     * JGit
     *
     * @see <a href="http://download.eclipse.org/jgit/site/3.7.0.201502260915-r/apidocs/org/eclipse/jgit/api/GarbageCollectCommand.html#call%28%29">GarbageCollectCommand</a>
     */
    Properties getStatistics();
  }

  void onGarbageCollected(Event event);
}
1
1
1
mixed
--- a/gerrit-extension-api/src/main/java/com/google/gerrit/extensions/events/GarbageCollectorListener.java +++ b/gerrit-extension-api/src/main/java/com/google/gerrit/extensions/events/GarbageCollectorListener.java @@ -33,3 +33,3 @@ * - * @see org.eclipse.jgit.api.GarbageCollectCommand#call() + * @see <a href="http://download.eclipse.org/jgit/site/3.7.0.201502260915-r/apidocs/org/eclipse/jgit/api/GarbageCollectCommand.html#call%28%29">GarbageCollectCommand</a> */
--- a/gerrit-extension-api/src/main/java/com/google/gerrit/extensions/events/GarbageCollectorListener.java +++ b/gerrit-extension-api/src/main/java/com/google/gerrit/extensions/events/GarbageCollectorListener.java @@ ... @@ * - * @see org.eclipse.jgit.api.GarbageCollectCommand#call() + * @see <a href="http://download.eclipse.org/jgit/site/3.7.0.201502260915-r/apidocs/org/eclipse/jgit/api/GarbageCollectCommand.html#call%28%29">GarbageCollectCommand</a> */
--- a/gerrit-extension-api/src/main/java/com/google/gerrit/extensions/events/GarbageCollectorListener.java +++ b/gerrit-extension-api/src/main/java/com/google/gerrit/extensions/events/GarbageCollectorListener.java @@ -33,3 +33,3 @@ CON * DEL * @see org.eclipse.jgit.api.GarbageCollectCommand#call() ADD * @see <a href="http://download.eclipse.org/jgit/site/3.7.0.201502260915-r/apidocs/org/eclipse/jgit/api/GarbageCollectCommand.html#call%28%29">GarbageCollectCommand</a> CON */
<<<<<<< SEARCH * JGit * * @see org.eclipse.jgit.api.GarbageCollectCommand#call() */ Properties getStatistics(); ======= * JGit * * @see <a href="http://download.eclipse.org/jgit/site/3.7.0.201502260915-r/apidocs/org/eclipse/jgit/api/GarbageCollectCommand.html#call%28%29">GarbageCollectCommand</a> */ Properties getStatistics(); >>>>>>> REPLACE
celery/cell
a24c657ca84e553a39e23d201d605d84d828c322
examples/hello.py
python
bsd-3-clause
Use the Server class (an Actor derived class)
from cell import Actor, Agent
from cell.actors import Server
from kombu import Connection
from kombu.log import setup_logging

connection = Connection()


class GreetingActor(Server):
    default_routing_key = 'GreetingActor'


    class state:

        def greet(self, who='world'):
            return 'Hello %s' % who
greeting = GreetingActor(connection)


class Printer(Actor):
    default_routing_key = 'Printer'

    class state:
        def echo(self, msg = 'test'):
            print 'I am a printer:',msg
            #self.output_edge.send(msg)
            return msg

printerActor = Printer(connection)



class Ihu(Actor):
    default_routing_key = 'Printer'

    class state:
        def temp(self, msg = 'blabla'):
            self.output_server.send(msg)


class GreetingAgent(Agent):
    actors = [greeting, printerActor]

if __name__ == '__main__':
    consumer = GreetingAgent(connection).consume_from_commandline()
    for _ in consumer:
        print 'Received'

# Run this script from the command line and try this
# in another console:
#
# >>> from hello import greeting
# >>> greeting.call('greet')
# 'Hello world'
from cell import Actor, Agent
from cell.actors import Server
from kombu import Connection
from kombu.log import setup_logging

connection = Connection()


class GreetingActor(Server):
    default_routing_key = 'GreetingActor'


    class state:
        def greet(self, who='world'):
            return 'Hello %s' % who
greeting = GreetingActor(connection)



class GreetingAgent(Agent):
    actors = [greeting]

if __name__ == '__main__':
    GreetingAgent(connection).consume_from_commandline()
# Run this script from the command line and try this
# in another console:
#
# >>> from hello import greeting
# >>> greeting.call('greet')
# 'Hello world'
3
27
2
mixed
--- a/examples/hello.py +++ b/examples/hello.py @@ -13,3 +13,2 @@ class state: - def greet(self, who='world'): @@ -17,33 +16,10 @@ greeting = GreetingActor(connection) + -class Printer(Actor): - default_routing_key = 'Printer' - - class state: - def echo(self, msg = 'test'): - print 'I am a printer:',msg - #self.output_edge.send(msg) - return msg - -printerActor = Printer(connection) - - - -class Ihu(Actor): - default_routing_key = 'Printer' - - class state: - def temp(self, msg = 'blabla'): - self.output_server.send(msg) - - class GreetingAgent(Agent): - actors = [greeting, printerActor] + actors = [greeting] if __name__ == '__main__': - consumer = GreetingAgent(connection).consume_from_commandline() - for _ in consumer: - print 'Received' - + GreetingAgent(connection).consume_from_commandline() # Run this script from the command line and try this
--- a/examples/hello.py +++ b/examples/hello.py @@ ... @@ class state: - def greet(self, who='world'): @@ ... @@ greeting = GreetingActor(connection) + -class Printer(Actor): - default_routing_key = 'Printer' - - class state: - def echo(self, msg = 'test'): - print 'I am a printer:',msg - #self.output_edge.send(msg) - return msg - -printerActor = Printer(connection) - - - -class Ihu(Actor): - default_routing_key = 'Printer' - - class state: - def temp(self, msg = 'blabla'): - self.output_server.send(msg) - - class GreetingAgent(Agent): - actors = [greeting, printerActor] + actors = [greeting] if __name__ == '__main__': - consumer = GreetingAgent(connection).consume_from_commandline() - for _ in consumer: - print 'Received' - + GreetingAgent(connection).consume_from_commandline() # Run this script from the command line and try this
--- a/examples/hello.py +++ b/examples/hello.py @@ -13,3 +13,2 @@ CON class state: DEL CON def greet(self, who='world'): @@ -17,33 +16,10 @@ CON greeting = GreetingActor(connection) ADD CON CON DEL class Printer(Actor): DEL default_routing_key = 'Printer' DEL DEL class state: DEL def echo(self, msg = 'test'): DEL print 'I am a printer:',msg DEL #self.output_edge.send(msg) DEL return msg DEL DEL printerActor = Printer(connection) DEL DEL DEL DEL class Ihu(Actor): DEL default_routing_key = 'Printer' DEL DEL class state: DEL def temp(self, msg = 'blabla'): DEL self.output_server.send(msg) DEL DEL CON class GreetingAgent(Agent): DEL actors = [greeting, printerActor] ADD actors = [greeting] CON CON if __name__ == '__main__': DEL consumer = GreetingAgent(connection).consume_from_commandline() DEL for _ in consumer: DEL print 'Received' DEL ADD GreetingAgent(connection).consume_from_commandline() CON # Run this script from the command line and try this
<<<<<<< SEARCH class state: def greet(self, who='world'): return 'Hello %s' % who greeting = GreetingActor(connection) class Printer(Actor): default_routing_key = 'Printer' class state: def echo(self, msg = 'test'): print 'I am a printer:',msg #self.output_edge.send(msg) return msg printerActor = Printer(connection) class Ihu(Actor): default_routing_key = 'Printer' class state: def temp(self, msg = 'blabla'): self.output_server.send(msg) class GreetingAgent(Agent): actors = [greeting, printerActor] if __name__ == '__main__': consumer = GreetingAgent(connection).consume_from_commandline() for _ in consumer: print 'Received' # Run this script from the command line and try this # in another console: ======= class state: def greet(self, who='world'): return 'Hello %s' % who greeting = GreetingActor(connection) class GreetingAgent(Agent): actors = [greeting] if __name__ == '__main__': GreetingAgent(connection).consume_from_commandline() # Run this script from the command line and try this # in another console: >>>>>>> REPLACE
mayfield/ecmcli
9d58310b2106e3ed5fc140a8479e003a4a647e82
setup.py
python
mit
Fix shellish requirement bump rev to 2.3.1
#!/usr/bin/env python

from setuptools import setup, find_packages

README = 'README.md'


def long_desc():
    try:
        import pypandoc
    except ImportError:
        with open(README) as f:
            return f.read()
    else:
        return pypandoc.convert(README, 'rst')

setup(
    name='ecmcli',
    version='2.3.0',
    description='Command Line Interface for Cradlepoint ECM',
    author='Justin Mayfield',
    author_email='[email protected]',
    url='https://github.com/mayfield/ecmcli/',
    license='MIT',
    long_description=long_desc(),
    packages=find_packages(),
    install_requires=[
        'syndicate==1.2.0',
        'shellish>=0.5.9',
        'humanize'
    ],
    entry_points = {
        'console_scripts': ['ecm=ecmcli.main:main'],
    },
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.4',
    ]
)
#!/usr/bin/env python

from setuptools import setup, find_packages

README = 'README.md'


def long_desc():
    try:
        import pypandoc
    except ImportError:
        with open(README) as f:
            return f.read()
    else:
        return pypandoc.convert(README, 'rst')

setup(
    name='ecmcli',
    version='2.3.1',
    description='Command Line Interface for Cradlepoint ECM',
    author='Justin Mayfield',
    author_email='[email protected]',
    url='https://github.com/mayfield/ecmcli/',
    license='MIT',
    long_description=long_desc(),
    packages=find_packages(),
    install_requires=[
        'syndicate==1.2.0',
        'shellish>=0.6.0',
        'humanize'
    ],
    entry_points = {
        'console_scripts': ['ecm=ecmcli.main:main'],
    },
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.4',
    ]
)
2
2
2
mixed
--- a/setup.py +++ b/setup.py @@ -18,3 +18,3 @@ name='ecmcli', - version='2.3.0', + version='2.3.1', description='Command Line Interface for Cradlepoint ECM', @@ -28,3 +28,3 @@ 'syndicate==1.2.0', - 'shellish>=0.5.9', + 'shellish>=0.6.0', 'humanize'
--- a/setup.py +++ b/setup.py @@ ... @@ name='ecmcli', - version='2.3.0', + version='2.3.1', description='Command Line Interface for Cradlepoint ECM', @@ ... @@ 'syndicate==1.2.0', - 'shellish>=0.5.9', + 'shellish>=0.6.0', 'humanize'
--- a/setup.py +++ b/setup.py @@ -18,3 +18,3 @@ CON name='ecmcli', DEL version='2.3.0', ADD version='2.3.1', CON description='Command Line Interface for Cradlepoint ECM', @@ -28,3 +28,3 @@ CON 'syndicate==1.2.0', DEL 'shellish>=0.5.9', ADD 'shellish>=0.6.0', CON 'humanize'
<<<<<<< SEARCH setup( name='ecmcli', version='2.3.0', description='Command Line Interface for Cradlepoint ECM', author='Justin Mayfield', ======= setup( name='ecmcli', version='2.3.1', description='Command Line Interface for Cradlepoint ECM', author='Justin Mayfield', >>>>>>> REPLACE <<<<<<< SEARCH install_requires=[ 'syndicate==1.2.0', 'shellish>=0.5.9', 'humanize' ], ======= install_requires=[ 'syndicate==1.2.0', 'shellish>=0.6.0', 'humanize' ], >>>>>>> REPLACE
dariost/dazzle
df317912a09681742964f66bd6b92b505f0a2207
src/chasher.rs
rust
mpl-2.0
Make the player id at most 31 bit
/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Copyright 2017 - Dario Ostuni <[email protected]>
 *
 */

use common::PlayerInfo;
use std::default::Default;
use std::hash::Hasher;
use twox_hash::XxHash;

pub struct CHasher
{
    internal: XxHash,
}

impl Default for CHasher
{
    fn default() -> CHasher
    {
        CHasher::new()
    }
}

impl CHasher
{
    pub fn new() -> CHasher
    {
        CHasher { internal: XxHash::with_seed(0) }
    }

    pub fn update(&mut self, data: &[u8])
    {
        self.internal.write(data);
    }

    pub fn finalize(self) -> u64
    {
        self.internal.finish() >> 32
    }
}

pub fn player_hash(player_info: &PlayerInfo) -> u64
{
    let mut hasher = CHasher::new();
    hasher.update(player_info.name.as_str().as_bytes());
    hasher.finalize()
}
/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Copyright 2017 - Dario Ostuni <[email protected]>
 *
 */

use common::PlayerInfo;
use std::default::Default;
use std::hash::Hasher;
use twox_hash::XxHash;

pub struct CHasher
{
    internal: XxHash,
}

impl Default for CHasher
{
    fn default() -> CHasher
    {
        CHasher::new()
    }
}

impl CHasher
{
    pub fn new() -> CHasher
    {
        CHasher { internal: XxHash::with_seed(0) }
    }

    pub fn update(&mut self, data: &[u8])
    {
        self.internal.write(data);
    }

    pub fn finalize(self) -> u64
    {
        self.internal.finish() >> 33
    }
}

pub fn player_hash(player_info: &PlayerInfo) -> u64
{
    let mut hasher = CHasher::new();
    hasher.update(player_info.name.as_str().as_bytes());
    hasher.finalize()
}
1
1
1
mixed
--- a/src/chasher.rs +++ b/src/chasher.rs @@ -41,3 +41,3 @@ { - self.internal.finish() >> 32 + self.internal.finish() >> 33 }
--- a/src/chasher.rs +++ b/src/chasher.rs @@ ... @@ { - self.internal.finish() >> 32 + self.internal.finish() >> 33 }
--- a/src/chasher.rs +++ b/src/chasher.rs @@ -41,3 +41,3 @@ CON { DEL self.internal.finish() >> 32 ADD self.internal.finish() >> 33 CON }
<<<<<<< SEARCH pub fn finalize(self) -> u64 { self.internal.finish() >> 32 } } ======= pub fn finalize(self) -> u64 { self.internal.finish() >> 33 } } >>>>>>> REPLACE
AyaNakazawa/business_card_bank
64d64599eada4d1831f31cf22b3cb6293736106a
js/myapp/login.js
javascript
mit
Add some template selectorin LoginModel
// ----------------------------------------------------------------
// Login Class

class LoginModel extends SwitchModel {

  constructor({
    name,
    lsKeyView,
    triggerSelector,
    switchSelector
  } = {}) {
    super({
      name: name,
      lsKeyView: lsKeyView,
      triggerSelector: triggerSelector,
      switchSelector: switchSelector
    });
  }
}

class LoginView extends SwitchView {
  constructor(_model = new LoginModel()) {
    super(_model);
  }
}

// ----------------------------------------------------------------
// Controller

class LoginController extends CommonController {
  constructor(_obj) {
    super(_obj);
    this.model = new LoginModel(_obj);
    this.view = new LoginView(this.model);
  }
}

// ----------------------------------------------------------------
// Event

class LoginEvent extends CommonEvent {
  constructor({
    name = 'Login Event'
  } = {}) {
    super({
      name: name
    });
    this.NAME = name;
    this.CONTROLLER = new LoginController({
      name: 'Login Switch',
      lsKeyView: 'login',
      triggerSelector: '#action-login',
      switchSelector: '#login-area'
    });
  }
}
// ----------------------------------------------------------------
// Login Class

class LoginModel extends SwitchModel {

  constructor({
    name,
    lsKeyView,
    triggerSelector,
    switchSelector
  } = {}) {
    super({
      name: name,
      lsKeyView: lsKeyView,
      triggerSelector: triggerSelector,
      switchSelector: switchSelector
    });

    this.LOGIN_AREA_SELECTOR = '#login-area';
    this.$LOGIN_AREA_SELECTOR = $(this.LOGIN_AREA_SELECTOR);
    this.TEMPLATE_LOGINED_SELECTOR = '#logined-template';
    this.$TEMPLATE_LOGINED_SELECTOR = $(this.TEMPLATE_LOGINED_SELECTOR);
    this.TEMPLATE_NOT_LOGIN_SELECTOR = '#not-login-template';
    this.$TEMPLATE_NOT_LOGIN_SELECTOR = $(this.TEMPLATE_NOT_LOGIN_SELECTOR);
  }
}

class LoginView extends SwitchView {
  constructor(_model = new LoginModel()) {
    super(_model);
  }
}

// ----------------------------------------------------------------
// Controller

class LoginController extends CommonController {
  constructor(_obj) {
    super(_obj);
    this.model = new LoginModel(_obj);
    this.view = new LoginView(this.model);
  }
}

// ----------------------------------------------------------------
// Event

class LoginEvent extends CommonEvent {
  constructor({
    name = 'Login Event'
  } = {}) {
    super({
      name: name
    });
    this.NAME = name;
    this.CONTROLLER = new LoginController({
      name: 'Login Switch',
      lsKeyView: 'login',
      triggerSelector: '#action-login',
      switchSelector: '#login-area'
    });
  }
}
7
0
1
add_only
--- a/js/myapp/login.js +++ b/js/myapp/login.js @@ -17,2 +17,9 @@ }); + + this.LOGIN_AREA_SELECTOR = '#login-area'; + this.$LOGIN_AREA_SELECTOR = $(this.LOGIN_AREA_SELECTOR); + this.TEMPLATE_LOGINED_SELECTOR = '#logined-template'; + this.$TEMPLATE_LOGINED_SELECTOR = $(this.TEMPLATE_LOGINED_SELECTOR); + this.TEMPLATE_NOT_LOGIN_SELECTOR = '#not-login-template'; + this.$TEMPLATE_NOT_LOGIN_SELECTOR = $(this.TEMPLATE_NOT_LOGIN_SELECTOR); }
--- a/js/myapp/login.js +++ b/js/myapp/login.js @@ ... @@ }); + + this.LOGIN_AREA_SELECTOR = '#login-area'; + this.$LOGIN_AREA_SELECTOR = $(this.LOGIN_AREA_SELECTOR); + this.TEMPLATE_LOGINED_SELECTOR = '#logined-template'; + this.$TEMPLATE_LOGINED_SELECTOR = $(this.TEMPLATE_LOGINED_SELECTOR); + this.TEMPLATE_NOT_LOGIN_SELECTOR = '#not-login-template'; + this.$TEMPLATE_NOT_LOGIN_SELECTOR = $(this.TEMPLATE_NOT_LOGIN_SELECTOR); }
--- a/js/myapp/login.js +++ b/js/myapp/login.js @@ -17,2 +17,9 @@ CON }); ADD ADD this.LOGIN_AREA_SELECTOR = '#login-area'; ADD this.$LOGIN_AREA_SELECTOR = $(this.LOGIN_AREA_SELECTOR); ADD this.TEMPLATE_LOGINED_SELECTOR = '#logined-template'; ADD this.$TEMPLATE_LOGINED_SELECTOR = $(this.TEMPLATE_LOGINED_SELECTOR); ADD this.TEMPLATE_NOT_LOGIN_SELECTOR = '#not-login-template'; ADD this.$TEMPLATE_NOT_LOGIN_SELECTOR = $(this.TEMPLATE_NOT_LOGIN_SELECTOR); CON }
<<<<<<< SEARCH switchSelector: switchSelector }); } } ======= switchSelector: switchSelector }); this.LOGIN_AREA_SELECTOR = '#login-area'; this.$LOGIN_AREA_SELECTOR = $(this.LOGIN_AREA_SELECTOR); this.TEMPLATE_LOGINED_SELECTOR = '#logined-template'; this.$TEMPLATE_LOGINED_SELECTOR = $(this.TEMPLATE_LOGINED_SELECTOR); this.TEMPLATE_NOT_LOGIN_SELECTOR = '#not-login-template'; this.$TEMPLATE_NOT_LOGIN_SELECTOR = $(this.TEMPLATE_NOT_LOGIN_SELECTOR); } } >>>>>>> REPLACE
AlexandreCollet/mewpipe_webapp
83f970f78d1678608ae450caf65466f9c21b745e
app/app.config.js
javascript
mit
Move register on /user/ url
angular.module('mewpipe')
.config(function(toastrConfig,$resourceProvider,$authProvider,$httpProvider,Config){

  /**
   * Toastr
   */

  angular.extend(toastrConfig, {
    positionClass : 'toast-bottom-right',
    closeButton : true,
  });

  /**
   * Resource Provider
   */

  $resourceProvider.defaults.stripTrailingSlashes = false;

  /**
   * HTTP Provider
   */

  $httpProvider.interceptors.push('errorsInterceptor');
  $httpProvider.interceptors.push('tokenInterceptor');

  /**
   * Auth Provider
   */

  $authProvider.baseUrl = Config.server.url + ':' + Config.server.port + '/api';

  $authProvider.loginUrl = '/login/';
  $authProvider.signupUrl = '/register/';
  $authProvider.loginRoute = '/login';
  $authProvider.signupRoute = '/register';
  $authProvider.unlinkUrl = '/logout';
  $authProvider.unlinkMethod = 'post';

  $authProvider.facebook({
    url : '/facebook',
    clientId : '950692341649325'
  });

});
angular.module('mewpipe')
.config(function(toastrConfig,$resourceProvider,$authProvider,$httpProvider,Config){

  /**
   * Toastr
   */

  angular.extend(toastrConfig, {
    positionClass : 'toast-bottom-right',
    closeButton : true,
  });

  /**
   * Resource Provider
   */

  $resourceProvider.defaults.stripTrailingSlashes = false;

  /**
   * HTTP Provider
   */

  $httpProvider.interceptors.push('errorsInterceptor');
  $httpProvider.interceptors.push('tokenInterceptor');

  /**
   * Auth Provider
   */

  $authProvider.loginOnSignup = false;

  $authProvider.baseUrl = Config.server.url + ':' + Config.server.port + '/api';

  $authProvider.loginUrl = '/login/';
  $authProvider.signupUrl = '/user/';
  $authProvider.loginRoute = '/login';
  $authProvider.signupRoute = '/register';
  $authProvider.unlinkUrl = '/logout';
  $authProvider.unlinkMethod = 'post';

  $authProvider.facebook({
    url : '/facebook',
    clientId : '950692341649325'
  });

});
3
1
2
mixed
--- a/app/app.config.js +++ b/app/app.config.js @@ -29,2 +29,4 @@ + $authProvider.loginOnSignup = false; + $authProvider.baseUrl = Config.server.url + ':' + Config.server.port + '/api'; @@ -32,3 +34,3 @@ $authProvider.loginUrl = '/login/'; - $authProvider.signupUrl = '/register/'; + $authProvider.signupUrl = '/user/'; $authProvider.loginRoute = '/login';
--- a/app/app.config.js +++ b/app/app.config.js @@ ... @@ + $authProvider.loginOnSignup = false; + $authProvider.baseUrl = Config.server.url + ':' + Config.server.port + '/api'; @@ ... @@ $authProvider.loginUrl = '/login/'; - $authProvider.signupUrl = '/register/'; + $authProvider.signupUrl = '/user/'; $authProvider.loginRoute = '/login';
--- a/app/app.config.js +++ b/app/app.config.js @@ -29,2 +29,4 @@ CON ADD $authProvider.loginOnSignup = false; ADD CON $authProvider.baseUrl = Config.server.url + ':' + Config.server.port + '/api'; @@ -32,3 +34,3 @@ CON $authProvider.loginUrl = '/login/'; DEL $authProvider.signupUrl = '/register/'; ADD $authProvider.signupUrl = '/user/'; CON $authProvider.loginRoute = '/login';
<<<<<<< SEARCH */ $authProvider.baseUrl = Config.server.url + ':' + Config.server.port + '/api'; $authProvider.loginUrl = '/login/'; $authProvider.signupUrl = '/register/'; $authProvider.loginRoute = '/login'; $authProvider.signupRoute = '/register'; ======= */ $authProvider.loginOnSignup = false; $authProvider.baseUrl = Config.server.url + ':' + Config.server.port + '/api'; $authProvider.loginUrl = '/login/'; $authProvider.signupUrl = '/user/'; $authProvider.loginRoute = '/login'; $authProvider.signupRoute = '/register'; >>>>>>> REPLACE
futabooo/conference-app-2017
1749fe077f151e6d2923f56a0797e1ba5c7efe68
app/src/test/java/io/github/droidkaigi/confsched2017/util/AssetsUtilTest.kt
kotlin
apache-2.0
Add succeed and fail tests
package io.github.droidkaigi.confsched2017.util

import org.junit.Assert.fail
import org.junit.Test
import org.junit.runner.RunWith
import org.robolectric.RobolectricTestRunner
import java.lang.reflect.InvocationTargetException

@RunWith(RobolectricTestRunner::class)
class AssetsUtilTest {

    @Test
    @Throws(Exception::class)
    fun ctor() {
        try {
            val ctor = AssetsUtil::class.java.getDeclaredConstructor()
            ctor.isAccessible = true
            ctor.newInstance()
        } catch (e: InvocationTargetException) {
            if (e.cause !is AssertionError) fail()
        }
    }
}
package io.github.droidkaigi.confsched2017.util

import com.google.firebase.FirebaseApp
import com.taroid.knit.should
import org.junit.Assert.fail
import org.junit.Test
import org.junit.runner.RunWith
import org.robolectric.RobolectricTestRunner
import org.robolectric.RuntimeEnvironment
import java.lang.reflect.InvocationTargetException

@RunWith(RobolectricTestRunner::class)
class AssetsUtilTest {

    @Test
    @Throws(Exception::class)
    fun ctor() {
        try {
            val ctor = AssetsUtil::class.java.getDeclaredConstructor()
            ctor.isAccessible = true
            ctor.newInstance()
        } catch (e: InvocationTargetException) {
            if (e.cause !is AssertionError) fail()
        }
    }

    @Test
    @Throws(Exception::class)
    fun loadJSONFromAsset_succseedsWhenFileExists() {
        val context = RuntimeEnvironment.application
        context.assets.list("json").forEach {
            val expect = context.assets.open("json/" + it)
                    .reader(charset = Charsets.UTF_8)
                    .use { it.readText() }
            val actual = AssetsUtil.loadJSONFromAsset(context, it)

            actual.should be expect
        }
    }

    @Test
    @Throws(Exception::class)
    fun loadJSONFromAsset_failsWhenFileNotExists() {
        FirebaseApp.initializeApp(RuntimeEnvironment.application)
        AssetsUtil.loadJSONFromAsset(RuntimeEnvironment.application, "NonExistsFile.json").should be null
    }
}
24
0
3
add_only
--- a/app/src/test/java/io/github/droidkaigi/confsched2017/util/AssetsUtilTest.kt +++ b/app/src/test/java/io/github/droidkaigi/confsched2017/util/AssetsUtilTest.kt @@ -2,2 +2,4 @@ +import com.google.firebase.FirebaseApp +import com.taroid.knit.should import org.junit.Assert.fail @@ -6,2 +8,3 @@ import org.robolectric.RobolectricTestRunner +import org.robolectric.RuntimeEnvironment import java.lang.reflect.InvocationTargetException @@ -22,2 +25,23 @@ } + + @Test + @Throws(Exception::class) + fun loadJSONFromAsset_succseedsWhenFileExists() { + val context = RuntimeEnvironment.application + context.assets.list("json").forEach { + val expect = context.assets.open("json/" + it) + .reader(charset = Charsets.UTF_8) + .use { it.readText() } + val actual = AssetsUtil.loadJSONFromAsset(context, it) + + actual.should be expect + } + } + + @Test + @Throws(Exception::class) + fun loadJSONFromAsset_failsWhenFileNotExists() { + FirebaseApp.initializeApp(RuntimeEnvironment.application) + AssetsUtil.loadJSONFromAsset(RuntimeEnvironment.application, "NonExistsFile.json").should be null + } }
--- a/app/src/test/java/io/github/droidkaigi/confsched2017/util/AssetsUtilTest.kt +++ b/app/src/test/java/io/github/droidkaigi/confsched2017/util/AssetsUtilTest.kt @@ ... @@ +import com.google.firebase.FirebaseApp +import com.taroid.knit.should import org.junit.Assert.fail @@ ... @@ import org.robolectric.RobolectricTestRunner +import org.robolectric.RuntimeEnvironment import java.lang.reflect.InvocationTargetException @@ ... @@ } + + @Test + @Throws(Exception::class) + fun loadJSONFromAsset_succseedsWhenFileExists() { + val context = RuntimeEnvironment.application + context.assets.list("json").forEach { + val expect = context.assets.open("json/" + it) + .reader(charset = Charsets.UTF_8) + .use { it.readText() } + val actual = AssetsUtil.loadJSONFromAsset(context, it) + + actual.should be expect + } + } + + @Test + @Throws(Exception::class) + fun loadJSONFromAsset_failsWhenFileNotExists() { + FirebaseApp.initializeApp(RuntimeEnvironment.application) + AssetsUtil.loadJSONFromAsset(RuntimeEnvironment.application, "NonExistsFile.json").should be null + } }
--- a/app/src/test/java/io/github/droidkaigi/confsched2017/util/AssetsUtilTest.kt +++ b/app/src/test/java/io/github/droidkaigi/confsched2017/util/AssetsUtilTest.kt @@ -2,2 +2,4 @@ CON ADD import com.google.firebase.FirebaseApp ADD import com.taroid.knit.should CON import org.junit.Assert.fail @@ -6,2 +8,3 @@ CON import org.robolectric.RobolectricTestRunner ADD import org.robolectric.RuntimeEnvironment CON import java.lang.reflect.InvocationTargetException @@ -22,2 +25,23 @@ CON } ADD ADD @Test ADD @Throws(Exception::class) ADD fun loadJSONFromAsset_succseedsWhenFileExists() { ADD val context = RuntimeEnvironment.application ADD context.assets.list("json").forEach { ADD val expect = context.assets.open("json/" + it) ADD .reader(charset = Charsets.UTF_8) ADD .use { it.readText() } ADD val actual = AssetsUtil.loadJSONFromAsset(context, it) ADD ADD actual.should be expect ADD } ADD } ADD ADD @Test ADD @Throws(Exception::class) ADD fun loadJSONFromAsset_failsWhenFileNotExists() { ADD FirebaseApp.initializeApp(RuntimeEnvironment.application) ADD AssetsUtil.loadJSONFromAsset(RuntimeEnvironment.application, "NonExistsFile.json").should be null ADD } CON }
<<<<<<< SEARCH package io.github.droidkaigi.confsched2017.util import org.junit.Assert.fail import org.junit.Test import org.junit.runner.RunWith import org.robolectric.RobolectricTestRunner import java.lang.reflect.InvocationTargetException ======= package io.github.droidkaigi.confsched2017.util import com.google.firebase.FirebaseApp import com.taroid.knit.should import org.junit.Assert.fail import org.junit.Test import org.junit.runner.RunWith import org.robolectric.RobolectricTestRunner import org.robolectric.RuntimeEnvironment import java.lang.reflect.InvocationTargetException >>>>>>> REPLACE <<<<<<< SEARCH } } } ======= } } @Test @Throws(Exception::class) fun loadJSONFromAsset_succseedsWhenFileExists() { val context = RuntimeEnvironment.application context.assets.list("json").forEach { val expect = context.assets.open("json/" + it) .reader(charset = Charsets.UTF_8) .use { it.readText() } val actual = AssetsUtil.loadJSONFromAsset(context, it) actual.should be expect } } @Test @Throws(Exception::class) fun loadJSONFromAsset_failsWhenFileNotExists() { FirebaseApp.initializeApp(RuntimeEnvironment.application) AssetsUtil.loadJSONFromAsset(RuntimeEnvironment.application, "NonExistsFile.json").should be null } } >>>>>>> REPLACE
cloudtools/troposphere
ef72be28dc83ff2c73335c6eb13135cab8affe53
troposphere/sso.py
python
bsd-2-clause
Update SSO per 2020-12-18 changes
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 18.6.0


from . import AWSObject
from troposphere import Tags


class Assignment(AWSObject):
    resource_type = "AWS::SSO::Assignment"

    props = {
        'InstanceArn': (basestring, True),
        'PermissionSetArn': (basestring, True),
        'PrincipalId': (basestring, True),
        'PrincipalType': (basestring, True),
        'TargetId': (basestring, True),
        'TargetType': (basestring, True),
    }


class PermissionSet(AWSObject):
    resource_type = "AWS::SSO::PermissionSet"

    props = {
        'Description': (basestring, False),
        'InlinePolicy': (basestring, False),
        'InstanceArn': (basestring, True),
        'ManagedPolicies': ([basestring], False),
        'Name': (basestring, True),
        'RelayStateType': (basestring, False),
        'SessionDuration': (basestring, False),
        'Tags': (Tags, False),
    }
# Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 25.0.0


from . import AWSObject
from . import AWSProperty
from troposphere import Tags


class Assignment(AWSObject):
    resource_type = "AWS::SSO::Assignment"

    props = {
        'InstanceArn': (basestring, True),
        'PermissionSetArn': (basestring, True),
        'PrincipalId': (basestring, True),
        'PrincipalType': (basestring, True),
        'TargetId': (basestring, True),
        'TargetType': (basestring, True),
    }


class AccessControlAttributeValueSourceList(AWSProperty):
    props = {
        'AccessControlAttributeValueSourceList': ([basestring], False),
    }


class AccessControlAttributeValue(AWSProperty):
    props = {
        'Source': (AccessControlAttributeValueSourceList, True),
    }


class AccessControlAttribute(AWSProperty):
    props = {
        'Key': (basestring, True),
        'Value': (AccessControlAttributeValue, True),
    }


class InstanceAccessControlAttributeConfiguration(AWSObject):
    resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration"

    props = {
        'AccessControlAttributes': ([AccessControlAttribute], False),
        'InstanceAccessControlAttributeConfiguration': (dict, False),
        'InstanceArn': (basestring, True),
    }


class PermissionSet(AWSObject):
    resource_type = "AWS::SSO::PermissionSet"

    props = {
        'Description': (basestring, False),
        'InlinePolicy': (dict, False),
        'InstanceArn': (basestring, True),
        'ManagedPolicies': ([basestring], False),
        'Name': (basestring, True),
        'RelayStateType': (basestring, False),
        'SessionDuration': (basestring, False),
        'Tags': (Tags, False),
    }
33
3
5
mixed
--- a/troposphere/sso.py +++ b/troposphere/sso.py @@ -1,2 +1,2 @@ -# Copyright (c) 2012-2020, Mark Peek <[email protected]> +# Copyright (c) 2012-2021, Mark Peek <[email protected]> # All rights reserved. @@ -6,3 +6,3 @@ # *** Do not modify - this file is autogenerated *** -# Resource specification version: 18.6.0 +# Resource specification version: 25.0.0 @@ -10,2 +10,3 @@ from . import AWSObject +from . import AWSProperty from troposphere import Tags @@ -26,2 +27,31 @@ +class AccessControlAttributeValueSourceList(AWSProperty): + props = { + 'AccessControlAttributeValueSourceList': ([basestring], False), + } + + +class AccessControlAttributeValue(AWSProperty): + props = { + 'Source': (AccessControlAttributeValueSourceList, True), + } + + +class AccessControlAttribute(AWSProperty): + props = { + 'Key': (basestring, True), + 'Value': (AccessControlAttributeValue, True), + } + + +class InstanceAccessControlAttributeConfiguration(AWSObject): + resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration" + + props = { + 'AccessControlAttributes': ([AccessControlAttribute], False), + 'InstanceAccessControlAttributeConfiguration': (dict, False), + 'InstanceArn': (basestring, True), + } + + class PermissionSet(AWSObject): @@ -31,3 +61,3 @@ 'Description': (basestring, False), - 'InlinePolicy': (basestring, False), + 'InlinePolicy': (dict, False), 'InstanceArn': (basestring, True),
--- a/troposphere/sso.py +++ b/troposphere/sso.py @@ ... @@ -# Copyright (c) 2012-2020, Mark Peek <[email protected]> +# Copyright (c) 2012-2021, Mark Peek <[email protected]> # All rights reserved. @@ ... @@ # *** Do not modify - this file is autogenerated *** -# Resource specification version: 18.6.0 +# Resource specification version: 25.0.0 @@ ... @@ from . import AWSObject +from . import AWSProperty from troposphere import Tags @@ ... @@ +class AccessControlAttributeValueSourceList(AWSProperty): + props = { + 'AccessControlAttributeValueSourceList': ([basestring], False), + } + + +class AccessControlAttributeValue(AWSProperty): + props = { + 'Source': (AccessControlAttributeValueSourceList, True), + } + + +class AccessControlAttribute(AWSProperty): + props = { + 'Key': (basestring, True), + 'Value': (AccessControlAttributeValue, True), + } + + +class InstanceAccessControlAttributeConfiguration(AWSObject): + resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration" + + props = { + 'AccessControlAttributes': ([AccessControlAttribute], False), + 'InstanceAccessControlAttributeConfiguration': (dict, False), + 'InstanceArn': (basestring, True), + } + + class PermissionSet(AWSObject): @@ ... @@ 'Description': (basestring, False), - 'InlinePolicy': (basestring, False), + 'InlinePolicy': (dict, False), 'InstanceArn': (basestring, True),
--- a/troposphere/sso.py +++ b/troposphere/sso.py @@ -1,2 +1,2 @@ DEL # Copyright (c) 2012-2020, Mark Peek <[email protected]> ADD # Copyright (c) 2012-2021, Mark Peek <[email protected]> CON # All rights reserved. @@ -6,3 +6,3 @@ CON # *** Do not modify - this file is autogenerated *** DEL # Resource specification version: 18.6.0 ADD # Resource specification version: 25.0.0 CON @@ -10,2 +10,3 @@ CON from . import AWSObject ADD from . import AWSProperty CON from troposphere import Tags @@ -26,2 +27,31 @@ CON ADD class AccessControlAttributeValueSourceList(AWSProperty): ADD props = { ADD 'AccessControlAttributeValueSourceList': ([basestring], False), ADD } ADD ADD ADD class AccessControlAttributeValue(AWSProperty): ADD props = { ADD 'Source': (AccessControlAttributeValueSourceList, True), ADD } ADD ADD ADD class AccessControlAttribute(AWSProperty): ADD props = { ADD 'Key': (basestring, True), ADD 'Value': (AccessControlAttributeValue, True), ADD } ADD ADD ADD class InstanceAccessControlAttributeConfiguration(AWSObject): ADD resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration" ADD ADD props = { ADD 'AccessControlAttributes': ([AccessControlAttribute], False), ADD 'InstanceAccessControlAttributeConfiguration': (dict, False), ADD 'InstanceArn': (basestring, True), ADD } ADD ADD CON class PermissionSet(AWSObject): @@ -31,3 +61,3 @@ CON 'Description': (basestring, False), DEL 'InlinePolicy': (basestring, False), ADD 'InlinePolicy': (dict, False), CON 'InstanceArn': (basestring, True),
<<<<<<< SEARCH # Copyright (c) 2012-2020, Mark Peek <[email protected]> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 18.6.0 from . import AWSObject from troposphere import Tags ======= # Copyright (c) 2012-2021, Mark Peek <[email protected]> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 25.0.0 from . import AWSObject from . import AWSProperty from troposphere import Tags >>>>>>> REPLACE <<<<<<< SEARCH class PermissionSet(AWSObject): resource_type = "AWS::SSO::PermissionSet" props = { 'Description': (basestring, False), 'InlinePolicy': (basestring, False), 'InstanceArn': (basestring, True), 'ManagedPolicies': ([basestring], False), ======= class AccessControlAttributeValueSourceList(AWSProperty): props = { 'AccessControlAttributeValueSourceList': ([basestring], False), } class AccessControlAttributeValue(AWSProperty): props = { 'Source': (AccessControlAttributeValueSourceList, True), } class AccessControlAttribute(AWSProperty): props = { 'Key': (basestring, True), 'Value': (AccessControlAttributeValue, True), } class InstanceAccessControlAttributeConfiguration(AWSObject): resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration" props = { 'AccessControlAttributes': ([AccessControlAttribute], False), 'InstanceAccessControlAttributeConfiguration': (dict, False), 'InstanceArn': (basestring, True), } class PermissionSet(AWSObject): resource_type = "AWS::SSO::PermissionSet" props = { 'Description': (basestring, False), 'InlinePolicy': (dict, False), 'InstanceArn': (basestring, True), 'ManagedPolicies': ([basestring], False), >>>>>>> REPLACE
rajatchhajed/myrrix-recommender
e642ed4da2bb168608ff8602abbfa95bbed83c5e
client/src/net/myrrix/client/translating/GenericTranslatedRecommendedItem.java
java
apache-2.0
Add a few more changes to be consistent with GenericRecommendedItem, like toString()
/* * Copyright Myrrix Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.myrrix.client.translating; public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem { private final String itemID; private final float value; public GenericTranslatedRecommendedItem(String itemID, float value) { this.itemID = itemID; this.value = value; } @Override public String getItemID() { return itemID; } @Override public float getValue() { return value; } }
/* * Copyright Myrrix Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.myrrix.client.translating; import java.io.Serializable; import com.google.common.base.Preconditions; import org.apache.mahout.common.RandomUtils; /** * <p>A simple implementation of {@link TranslatedRecommendedItem}.</p> * * @author Sean Owen */ public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem, Serializable { private final String itemID; private final float value; /** * @throws IllegalArgumentException if item is null or value is NaN or infinite */ public GenericTranslatedRecommendedItem(String itemID, float value) { Preconditions.checkNotNull(itemID); Preconditions.checkArgument(!Float.isNaN(value) && !Float.isInfinite(value)); this.itemID = itemID; this.value = value; } @Override public String getItemID() { return itemID; } @Override public float getValue() { return value; } @Override public String toString() { return "GenericTranslatedRecommendedItem[item:" + itemID + ", value:" + value + ']'; } @Override public int hashCode() { return itemID.hashCode() ^ RandomUtils.hashFloat(value); } @Override public boolean equals(Object o) { if (!(o instanceof GenericTranslatedRecommendedItem)) { return false; } GenericTranslatedRecommendedItem other = (GenericTranslatedRecommendedItem) o; return itemID.equals(other.getItemID()) && value == other.getValue(); } }
35
1
3
mixed
--- a/client/src/net/myrrix/client/translating/GenericTranslatedRecommendedItem.java +++ b/client/src/net/myrrix/client/translating/GenericTranslatedRecommendedItem.java @@ -18,3 +18,13 @@ -public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem { +import java.io.Serializable; + +import com.google.common.base.Preconditions; +import org.apache.mahout.common.RandomUtils; + +/** + * <p>A simple implementation of {@link TranslatedRecommendedItem}.</p> + * + * @author Sean Owen + */ +public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem, Serializable { @@ -23,3 +33,8 @@ + /** + * @throws IllegalArgumentException if item is null or value is NaN or infinite + */ public GenericTranslatedRecommendedItem(String itemID, float value) { + Preconditions.checkNotNull(itemID); + Preconditions.checkArgument(!Float.isNaN(value) && !Float.isInfinite(value)); this.itemID = itemID; @@ -37,2 +52,21 @@ } + + @Override + public String toString() { + return "GenericTranslatedRecommendedItem[item:" + itemID + ", value:" + value + ']'; + } + + @Override + public int hashCode() { + return itemID.hashCode() ^ RandomUtils.hashFloat(value); + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof GenericTranslatedRecommendedItem)) { + return false; + } + GenericTranslatedRecommendedItem other = (GenericTranslatedRecommendedItem) o; + return itemID.equals(other.getItemID()) && value == other.getValue(); + }
--- a/client/src/net/myrrix/client/translating/GenericTranslatedRecommendedItem.java +++ b/client/src/net/myrrix/client/translating/GenericTranslatedRecommendedItem.java @@ ... @@ -public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem { +import java.io.Serializable; + +import com.google.common.base.Preconditions; +import org.apache.mahout.common.RandomUtils; + +/** + * <p>A simple implementation of {@link TranslatedRecommendedItem}.</p> + * + * @author Sean Owen + */ +public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem, Serializable { @@ ... @@ + /** + * @throws IllegalArgumentException if item is null or value is NaN or infinite + */ public GenericTranslatedRecommendedItem(String itemID, float value) { + Preconditions.checkNotNull(itemID); + Preconditions.checkArgument(!Float.isNaN(value) && !Float.isInfinite(value)); this.itemID = itemID; @@ ... @@ } + + @Override + public String toString() { + return "GenericTranslatedRecommendedItem[item:" + itemID + ", value:" + value + ']'; + } + + @Override + public int hashCode() { + return itemID.hashCode() ^ RandomUtils.hashFloat(value); + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof GenericTranslatedRecommendedItem)) { + return false; + } + GenericTranslatedRecommendedItem other = (GenericTranslatedRecommendedItem) o; + return itemID.equals(other.getItemID()) && value == other.getValue(); + }
--- a/client/src/net/myrrix/client/translating/GenericTranslatedRecommendedItem.java +++ b/client/src/net/myrrix/client/translating/GenericTranslatedRecommendedItem.java @@ -18,3 +18,13 @@ CON DEL public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem { ADD import java.io.Serializable; ADD ADD import com.google.common.base.Preconditions; ADD import org.apache.mahout.common.RandomUtils; ADD ADD /** ADD * <p>A simple implementation of {@link TranslatedRecommendedItem}.</p> ADD * ADD * @author Sean Owen ADD */ ADD public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem, Serializable { CON @@ -23,3 +33,8 @@ CON ADD /** ADD * @throws IllegalArgumentException if item is null or value is NaN or infinite ADD */ CON public GenericTranslatedRecommendedItem(String itemID, float value) { ADD Preconditions.checkNotNull(itemID); ADD Preconditions.checkArgument(!Float.isNaN(value) && !Float.isInfinite(value)); CON this.itemID = itemID; @@ -37,2 +52,21 @@ CON } ADD ADD @Override ADD public String toString() { ADD return "GenericTranslatedRecommendedItem[item:" + itemID + ", value:" + value + ']'; ADD } ADD ADD @Override ADD public int hashCode() { ADD return itemID.hashCode() ^ RandomUtils.hashFloat(value); ADD } ADD ADD @Override ADD public boolean equals(Object o) { ADD if (!(o instanceof GenericTranslatedRecommendedItem)) { ADD return false; ADD } ADD GenericTranslatedRecommendedItem other = (GenericTranslatedRecommendedItem) o; ADD return itemID.equals(other.getItemID()) && value == other.getValue(); ADD } CON
<<<<<<< SEARCH package net.myrrix.client.translating; public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem { private final String itemID; private final float value; public GenericTranslatedRecommendedItem(String itemID, float value) { this.itemID = itemID; this.value = value; ======= package net.myrrix.client.translating; import java.io.Serializable; import com.google.common.base.Preconditions; import org.apache.mahout.common.RandomUtils; /** * <p>A simple implementation of {@link TranslatedRecommendedItem}.</p> * * @author Sean Owen */ public final class GenericTranslatedRecommendedItem implements TranslatedRecommendedItem, Serializable { private final String itemID; private final float value; /** * @throws IllegalArgumentException if item is null or value is NaN or infinite */ public GenericTranslatedRecommendedItem(String itemID, float value) { Preconditions.checkNotNull(itemID); Preconditions.checkArgument(!Float.isNaN(value) && !Float.isInfinite(value)); this.itemID = itemID; this.value = value; >>>>>>> REPLACE <<<<<<< SEARCH return value; } } ======= return value; } @Override public String toString() { return "GenericTranslatedRecommendedItem[item:" + itemID + ", value:" + value + ']'; } @Override public int hashCode() { return itemID.hashCode() ^ RandomUtils.hashFloat(value); } @Override public boolean equals(Object o) { if (!(o instanceof GenericTranslatedRecommendedItem)) { return false; } GenericTranslatedRecommendedItem other = (GenericTranslatedRecommendedItem) o; return itemID.equals(other.getItemID()) && value == other.getValue(); } } >>>>>>> REPLACE
pushpad/pushpad-java
b2ef13d010d9bc3f50c6c2f2c580433814fbb702
src/xyz/pushpad/Pushpad.java
java
mit
Use hex-encoding for HMAC-SHA1 signature As described in the documentation ( https://pushpad.xyz/docs/identifying_users ), the signature should be hex-encoded. Base64-encoded signatures get rejected.
package xyz.pushpad; import javax.crypto.spec.SecretKeySpec; import javax.crypto.Mac; import java.security.SignatureException; import java.security.NoSuchAlgorithmException; import java.security.InvalidKeyException; import java.util.Base64; public class Pushpad { public String authToken; public String projectId; public Pushpad(String authToken, String projectId) { this.authToken = authToken; this.projectId = projectId; } public String signatureFor(String data) { SecretKeySpec signingKey = new SecretKeySpec(this.authToken.getBytes(), "HmacSHA1"); String encoded = null; try { Mac mac = Mac.getInstance("HmacSHA1"); mac.init(signingKey); byte[] rawHmac = mac.doFinal(data.getBytes()); encoded = Base64.getEncoder().withoutPadding().encodeToString(rawHmac); } catch (NoSuchAlgorithmException | InvalidKeyException e) { e.printStackTrace(); } return encoded; } public String path() { return "https://pushpad.xyz/projects/" + this.projectId + "/subscription/edit"; } public String pathFor(String uid) { String uidSignature = this.signatureFor(uid); return this.path() + "?uid=" + uid + "&uid_signature=" + uidSignature; } public Notification buildNotification(String title, String body, String targetUrl) { return new Notification(this, title, body, targetUrl); } }
package xyz.pushpad; import javax.crypto.spec.SecretKeySpec; import javax.crypto.Mac; import java.security.SignatureException; import java.security.NoSuchAlgorithmException; import java.security.InvalidKeyException; import javax.xml.bind.DatatypeConverter; public class Pushpad { public String authToken; public String projectId; public Pushpad(String authToken, String projectId) { this.authToken = authToken; this.projectId = projectId; } public String signatureFor(String data) { SecretKeySpec signingKey = new SecretKeySpec(this.authToken.getBytes(), "HmacSHA1"); String encoded = null; try { Mac mac = Mac.getInstance("HmacSHA1"); mac.init(signingKey); byte[] rawHmac = mac.doFinal(data.getBytes()); encoded = DatatypeConverter.printHexBinary(rawHmac); } catch (NoSuchAlgorithmException | InvalidKeyException e) { e.printStackTrace(); } return encoded; } public String path() { return "https://pushpad.xyz/projects/" + this.projectId + "/subscription/edit"; } public String pathFor(String uid) { String uidSignature = this.signatureFor(uid); return this.path() + "?uid=" + uid + "&uid_signature=" + uidSignature; } public Notification buildNotification(String title, String body, String targetUrl) { return new Notification(this, title, body, targetUrl); } }
2
2
2
mixed
--- a/src/xyz/pushpad/Pushpad.java +++ b/src/xyz/pushpad/Pushpad.java @@ -7,3 +7,3 @@ import java.security.InvalidKeyException; -import java.util.Base64; +import javax.xml.bind.DatatypeConverter; @@ -25,3 +25,3 @@ byte[] rawHmac = mac.doFinal(data.getBytes()); - encoded = Base64.getEncoder().withoutPadding().encodeToString(rawHmac); + encoded = DatatypeConverter.printHexBinary(rawHmac); } catch (NoSuchAlgorithmException | InvalidKeyException e) {
--- a/src/xyz/pushpad/Pushpad.java +++ b/src/xyz/pushpad/Pushpad.java @@ ... @@ import java.security.InvalidKeyException; -import java.util.Base64; +import javax.xml.bind.DatatypeConverter; @@ ... @@ byte[] rawHmac = mac.doFinal(data.getBytes()); - encoded = Base64.getEncoder().withoutPadding().encodeToString(rawHmac); + encoded = DatatypeConverter.printHexBinary(rawHmac); } catch (NoSuchAlgorithmException | InvalidKeyException e) {
--- a/src/xyz/pushpad/Pushpad.java +++ b/src/xyz/pushpad/Pushpad.java @@ -7,3 +7,3 @@ CON import java.security.InvalidKeyException; DEL import java.util.Base64; ADD import javax.xml.bind.DatatypeConverter; CON @@ -25,3 +25,3 @@ CON byte[] rawHmac = mac.doFinal(data.getBytes()); DEL encoded = Base64.getEncoder().withoutPadding().encodeToString(rawHmac); ADD encoded = DatatypeConverter.printHexBinary(rawHmac); CON } catch (NoSuchAlgorithmException | InvalidKeyException e) {
<<<<<<< SEARCH import java.security.NoSuchAlgorithmException; import java.security.InvalidKeyException; import java.util.Base64; public class Pushpad { ======= import java.security.NoSuchAlgorithmException; import java.security.InvalidKeyException; import javax.xml.bind.DatatypeConverter; public class Pushpad { >>>>>>> REPLACE <<<<<<< SEARCH mac.init(signingKey); byte[] rawHmac = mac.doFinal(data.getBytes()); encoded = Base64.getEncoder().withoutPadding().encodeToString(rawHmac); } catch (NoSuchAlgorithmException | InvalidKeyException e) { e.printStackTrace(); ======= mac.init(signingKey); byte[] rawHmac = mac.doFinal(data.getBytes()); encoded = DatatypeConverter.printHexBinary(rawHmac); } catch (NoSuchAlgorithmException | InvalidKeyException e) { e.printStackTrace(); >>>>>>> REPLACE
sejalkhatri/WikiEduDashboard
7df79d0ea629e8610e3b2ba7b74acdc06305f805
test/testHelper.js
javascript
mit
Work around gulp error during i18n asset rebuilding The test file requires built assets, so linting fails with an unresolved require while the i18n assets are being built.
const jsdom = require('jsdom'); global.document = jsdom.jsdom("<!doctype html><html><body><div data-current_user='{ \"admin\": false, \"id\": null }' id='react_root'></div></body></html>", { url: 'http://localhost', skipWindowCheck: true }); global.window = document.defaultView; global.navigator = global.window.navigator; const sinon = require('sinon'); const React = require('react'); const ReactDOM = require('react-dom'); const ReactTestUtils = require('react-addons-test-utils'); const $ = require('jquery'); const _ = require('lodash'); const moment = require('moment'); const momentRecur = require('moment-recur'); const I18n = require('../public/assets/javascripts/i18n.js'); const chai = require('chai'); const sinonChai = require('sinon-chai'); global.$ = $; global._ = _; global.sinon = sinon; global.React = React; global.ReactDOM = ReactDOM; global.ReactTestUtils = ReactTestUtils; global.Simulate = ReactTestUtils.Simulate; global.moment = moment; global['moment-recur'] = momentRecur; global.I18n = I18n; global.chai = chai; global.expect = chai.expect; global.assert = chai.assert; global.Features = {}; require('../public/assets/javascripts/i18n/en'); chai.use(sinonChai);
const jsdom = require('jsdom'); global.document = jsdom.jsdom("<!doctype html><html><body><div data-current_user='{ \"admin\": false, \"id\": null }' id='react_root'></div></body></html>", { url: 'http://localhost', skipWindowCheck: true }); global.window = document.defaultView; global.navigator = global.window.navigator; const sinon = require('sinon'); const React = require('react'); const ReactDOM = require('react-dom'); const ReactTestUtils = require('react-addons-test-utils'); const $ = require('jquery'); const _ = require('lodash'); const moment = require('moment'); const momentRecur = require('moment-recur'); const I18n = require('../public/assets/javascripts/i18n.js'); // eslint-disable-line import/no-unresolved const chai = require('chai'); const sinonChai = require('sinon-chai'); global.$ = $; global._ = _; global.sinon = sinon; global.React = React; global.ReactDOM = ReactDOM; global.ReactTestUtils = ReactTestUtils; global.Simulate = ReactTestUtils.Simulate; global.moment = moment; global['moment-recur'] = momentRecur; global.I18n = I18n; global.chai = chai; global.expect = chai.expect; global.assert = chai.assert; global.Features = {}; require('../public/assets/javascripts/i18n/en'); // eslint-disable-line import/no-unresolved chai.use(sinonChai);
2
2
2
mixed
--- a/test/testHelper.js +++ b/test/testHelper.js @@ -18,3 +18,3 @@ const momentRecur = require('moment-recur'); -const I18n = require('../public/assets/javascripts/i18n.js'); +const I18n = require('../public/assets/javascripts/i18n.js'); // eslint-disable-line import/no-unresolved const chai = require('chai'); @@ -37,3 +37,3 @@ -require('../public/assets/javascripts/i18n/en'); +require('../public/assets/javascripts/i18n/en'); // eslint-disable-line import/no-unresolved
--- a/test/testHelper.js +++ b/test/testHelper.js @@ ... @@ const momentRecur = require('moment-recur'); -const I18n = require('../public/assets/javascripts/i18n.js'); +const I18n = require('../public/assets/javascripts/i18n.js'); // eslint-disable-line import/no-unresolved const chai = require('chai'); @@ ... @@ -require('../public/assets/javascripts/i18n/en'); +require('../public/assets/javascripts/i18n/en'); // eslint-disable-line import/no-unresolved
--- a/test/testHelper.js +++ b/test/testHelper.js @@ -18,3 +18,3 @@ CON const momentRecur = require('moment-recur'); DEL const I18n = require('../public/assets/javascripts/i18n.js'); ADD const I18n = require('../public/assets/javascripts/i18n.js'); // eslint-disable-line import/no-unresolved CON const chai = require('chai'); @@ -37,3 +37,3 @@ CON DEL require('../public/assets/javascripts/i18n/en'); ADD require('../public/assets/javascripts/i18n/en'); // eslint-disable-line import/no-unresolved CON
<<<<<<< SEARCH const moment = require('moment'); const momentRecur = require('moment-recur'); const I18n = require('../public/assets/javascripts/i18n.js'); const chai = require('chai'); const sinonChai = require('sinon-chai'); ======= const moment = require('moment'); const momentRecur = require('moment-recur'); const I18n = require('../public/assets/javascripts/i18n.js'); // eslint-disable-line import/no-unresolved const chai = require('chai'); const sinonChai = require('sinon-chai'); >>>>>>> REPLACE <<<<<<< SEARCH global.Features = {}; require('../public/assets/javascripts/i18n/en'); chai.use(sinonChai); ======= global.Features = {}; require('../public/assets/javascripts/i18n/en'); // eslint-disable-line import/no-unresolved chai.use(sinonChai); >>>>>>> REPLACE
singularities/circular-works
10b77c9d4cb4520152d52e1a006ffc19a5c8783e
frontend/app/components/admin-form-modal.js
javascript
agpl-3.0
Add note on error handling
import Ember from 'ember'; export default Ember.Component.extend({ openConfirmRemove: false, actions: { cancel () { this.cleanErrors(); this.get('organization').rollbackAttributes(); }, save () { let organization = this.get('organization'); organization.save().then(() => { this.cleanErrors(); this.set('open', false); }).catch((error) => { this.set('organizationErrors', organization.get('errors')); this.set('requestErrors', error.errors); }); }, }, cleanErrors () { this.set('organizationErrors', null); this.set('requestErrors', null); } });
import Ember from 'ember'; export default Ember.Component.extend({ openConfirmRemove: false, actions: { cancel () { this.cleanErrors(); this.get('organization').rollbackAttributes(); }, save () { let organization = this.get('organization'); organization.save().then(() => { this.cleanErrors(); this.set('open', false); }).catch((error) => { /* TODO * Better handling of errors * Currently it is dealing with * AdapterError.errors // => ["You need to sign in...."] * but also there are: * ErrorClass.errors // => [{ detail: 'The adapter ... invalid', title: 'AdapterError'}] * but maybe these should be prevented on client */ this.set('organizationErrors', organization.get('errors')); this.set('requestErrors', error.errors); }); }, }, cleanErrors () { this.set('organizationErrors', null); this.set('requestErrors', null); } });
8
0
1
add_only
--- a/frontend/app/components/admin-form-modal.js +++ b/frontend/app/components/admin-form-modal.js @@ -22,2 +22,10 @@ }).catch((error) => { + /* TODO + * Better handling of errors + * Currently it is dealing with + * AdapterError.errors // => ["You need to sign in...."] + * but also there are: + * ErrorClass.errors // => [{ detail: 'The adapter ... invalid', title: 'AdapterError'}] + * but maybe these should be prevented on client + */ this.set('organizationErrors', organization.get('errors'));
--- a/frontend/app/components/admin-form-modal.js +++ b/frontend/app/components/admin-form-modal.js @@ ... @@ }).catch((error) => { + /* TODO + * Better handling of errors + * Currently it is dealing with + * AdapterError.errors // => ["You need to sign in...."] + * but also there are: + * ErrorClass.errors // => [{ detail: 'The adapter ... invalid', title: 'AdapterError'}] + * but maybe these should be prevented on client + */ this.set('organizationErrors', organization.get('errors'));
--- a/frontend/app/components/admin-form-modal.js +++ b/frontend/app/components/admin-form-modal.js @@ -22,2 +22,10 @@ CON }).catch((error) => { ADD /* TODO ADD * Better handling of errors ADD * Currently it is dealing with ADD * AdapterError.errors // => ["You need to sign in...."] ADD * but also there are: ADD * ErrorClass.errors // => [{ detail: 'The adapter ... invalid', title: 'AdapterError'}] ADD * but maybe these should be prevented on client ADD */ CON this.set('organizationErrors', organization.get('errors'));
<<<<<<< SEARCH this.set('open', false); }).catch((error) => { this.set('organizationErrors', organization.get('errors')); this.set('requestErrors', error.errors); ======= this.set('open', false); }).catch((error) => { /* TODO * Better handling of errors * Currently it is dealing with * AdapterError.errors // => ["You need to sign in...."] * but also there are: * ErrorClass.errors // => [{ detail: 'The adapter ... invalid', title: 'AdapterError'}] * but maybe these should be prevented on client */ this.set('organizationErrors', organization.get('errors')); this.set('requestErrors', error.errors); >>>>>>> REPLACE
square/leakcanary
ac63aa174bd29217ac3f683281db9977b8dbba3b
leakcanary-android-instrumentation/src/androidTest/java/leakcanary/TestUtils.kt
kotlin
apache-2.0
Migrate TestUtil to use the newer API
package leakcanary import leakcanary.InstrumentationLeakDetector.Result.AnalysisPerformed import leakcanary.InstrumentationLeakDetector.Result.NoAnalysis import shark.HeapAnalysisSuccess object TestUtils { fun assertLeak(expectedLeakClass: Class<*>) { val leakDetector = InstrumentationLeakDetector() val heapAnalysis = when (val result = leakDetector.detectLeaks()) { is NoAnalysis -> throw AssertionError( "Expected analysis to be performed but skipped because ${result.reason}" ) is AnalysisPerformed -> result.heapAnalysis } if (heapAnalysis !is HeapAnalysisSuccess) { throw AssertionError( "Expected analysis success not $heapAnalysis" ) } val applicationLeaks = heapAnalysis.applicationLeaks if (applicationLeaks.size != 1) { throw AssertionError( "Expected exactly one leak in $heapAnalysis" ) } val leak = applicationLeaks.first() val leakTrace = leak.leakTraces.first() val className = leakTrace.leakingObject.className if (className != expectedLeakClass.name) { throw AssertionError( "Expected a leak of $expectedLeakClass, not $className in $heapAnalysis" ) } } }
package leakcanary import shark.HeapAnalysis import shark.HeapAnalysisSuccess object TestUtils { fun assertLeak(expectedLeakClass: Class<*>) { var heapAnalysisOrNull: HeapAnalysis? = null AndroidDetectLeaksAssert { heapAnalysis -> heapAnalysisOrNull = heapAnalysis }.assertNoLeaks("") if (heapAnalysisOrNull == null) { throw AssertionError( "Expected analysis to be performed but skipped" ) } val heapAnalysis = heapAnalysisOrNull if (heapAnalysis !is HeapAnalysisSuccess) { throw AssertionError( "Expected analysis success not $heapAnalysis" ) } val applicationLeaks = heapAnalysis.applicationLeaks if (applicationLeaks.size != 1) { throw AssertionError( "Expected exactly one leak in $heapAnalysis" ) } val leak = applicationLeaks.first() val leakTrace = leak.leakTraces.first() val className = leakTrace.leakingObject.className if (className != expectedLeakClass.name) { throw AssertionError( "Expected a leak of $expectedLeakClass, not $className in $heapAnalysis" ) } } }
10
7
2
mixed
--- a/leakcanary-android-instrumentation/src/androidTest/java/leakcanary/TestUtils.kt +++ b/leakcanary-android-instrumentation/src/androidTest/java/leakcanary/TestUtils.kt @@ -2,4 +2,3 @@ -import leakcanary.InstrumentationLeakDetector.Result.AnalysisPerformed -import leakcanary.InstrumentationLeakDetector.Result.NoAnalysis +import shark.HeapAnalysis import shark.HeapAnalysisSuccess @@ -8,10 +7,14 @@ fun assertLeak(expectedLeakClass: Class<*>) { - val leakDetector = InstrumentationLeakDetector() + var heapAnalysisOrNull: HeapAnalysis? = null + AndroidDetectLeaksAssert { heapAnalysis -> + heapAnalysisOrNull = heapAnalysis + }.assertNoLeaks("") - val heapAnalysis = when (val result = leakDetector.detectLeaks()) { - is NoAnalysis -> throw AssertionError( - "Expected analysis to be performed but skipped because ${result.reason}" + if (heapAnalysisOrNull == null) { + throw AssertionError( + "Expected analysis to be performed but skipped" ) - is AnalysisPerformed -> result.heapAnalysis } + + val heapAnalysis = heapAnalysisOrNull
--- a/leakcanary-android-instrumentation/src/androidTest/java/leakcanary/TestUtils.kt +++ b/leakcanary-android-instrumentation/src/androidTest/java/leakcanary/TestUtils.kt @@ ... @@ -import leakcanary.InstrumentationLeakDetector.Result.AnalysisPerformed -import leakcanary.InstrumentationLeakDetector.Result.NoAnalysis +import shark.HeapAnalysis import shark.HeapAnalysisSuccess @@ ... @@ fun assertLeak(expectedLeakClass: Class<*>) { - val leakDetector = InstrumentationLeakDetector() + var heapAnalysisOrNull: HeapAnalysis? = null + AndroidDetectLeaksAssert { heapAnalysis -> + heapAnalysisOrNull = heapAnalysis + }.assertNoLeaks("") - val heapAnalysis = when (val result = leakDetector.detectLeaks()) { - is NoAnalysis -> throw AssertionError( - "Expected analysis to be performed but skipped because ${result.reason}" + if (heapAnalysisOrNull == null) { + throw AssertionError( + "Expected analysis to be performed but skipped" ) - is AnalysisPerformed -> result.heapAnalysis } + + val heapAnalysis = heapAnalysisOrNull
--- a/leakcanary-android-instrumentation/src/androidTest/java/leakcanary/TestUtils.kt +++ b/leakcanary-android-instrumentation/src/androidTest/java/leakcanary/TestUtils.kt @@ -2,4 +2,3 @@ CON DEL import leakcanary.InstrumentationLeakDetector.Result.AnalysisPerformed DEL import leakcanary.InstrumentationLeakDetector.Result.NoAnalysis ADD import shark.HeapAnalysis CON import shark.HeapAnalysisSuccess @@ -8,10 +7,14 @@ CON fun assertLeak(expectedLeakClass: Class<*>) { DEL val leakDetector = InstrumentationLeakDetector() ADD var heapAnalysisOrNull: HeapAnalysis? = null ADD AndroidDetectLeaksAssert { heapAnalysis -> ADD heapAnalysisOrNull = heapAnalysis ADD }.assertNoLeaks("") CON DEL val heapAnalysis = when (val result = leakDetector.detectLeaks()) { DEL is NoAnalysis -> throw AssertionError( DEL "Expected analysis to be performed but skipped because ${result.reason}" ADD if (heapAnalysisOrNull == null) { ADD throw AssertionError( ADD "Expected analysis to be performed but skipped" CON ) DEL is AnalysisPerformed -> result.heapAnalysis CON } ADD ADD val heapAnalysis = heapAnalysisOrNull CON
<<<<<<< SEARCH package leakcanary import leakcanary.InstrumentationLeakDetector.Result.AnalysisPerformed import leakcanary.InstrumentationLeakDetector.Result.NoAnalysis import shark.HeapAnalysisSuccess object TestUtils { fun assertLeak(expectedLeakClass: Class<*>) { val leakDetector = InstrumentationLeakDetector() val heapAnalysis = when (val result = leakDetector.detectLeaks()) { is NoAnalysis -> throw AssertionError( "Expected analysis to be performed but skipped because ${result.reason}" ) is AnalysisPerformed -> result.heapAnalysis } if (heapAnalysis !is HeapAnalysisSuccess) { ======= package leakcanary import shark.HeapAnalysis import shark.HeapAnalysisSuccess object TestUtils { fun assertLeak(expectedLeakClass: Class<*>) { var heapAnalysisOrNull: HeapAnalysis? = null AndroidDetectLeaksAssert { heapAnalysis -> heapAnalysisOrNull = heapAnalysis }.assertNoLeaks("") if (heapAnalysisOrNull == null) { throw AssertionError( "Expected analysis to be performed but skipped" ) } val heapAnalysis = heapAnalysisOrNull if (heapAnalysis !is HeapAnalysisSuccess) { >>>>>>> REPLACE <<<<<<< SEARCH } } } ======= } } } >>>>>>> REPLACE
PressLabs/gitfs
4dd86439d4c8393ac9c3bb6b958a1c8cb45b243a
gitfs/views/history_index.py
python
apache-2.0
Add mandatory methods to HistoryIndexView (refactor when working)
from .view import View class HistoryIndexView(View): pass
from .view import View from errno import ENOENT from stat import S_IFDIR from gitfs import FuseMethodNotImplemented, FuseOSError from log import log class HistoryIndexView(View): def getattr(self, path, fh=None): ''' Returns a dictionary with keys identical to the stat C structure of stat(2). st_atime, st_mtime and st_ctime should be floats. NOTE: There is an incombatibility between Linux and Mac OS X concerning st_nlink of directories. Mac OS X counts all files inside the directory, while Linux counts only the subdirectories. ''' if path != '/': raise FuseOSError(ENOENT) return dict(st_mode=(S_IFDIR | 0755), st_nlink=2) def opendir(self, path): return 0 def releasedir(self, path, fi): pass def access(self, path, amode): log.info('%s %s', path, amode) return 0 def readdir(self, path, fh): return ['.', '..', 'commits everywhere']
36
1
2
mixed
--- a/gitfs/views/history_index.py +++ b/gitfs/views/history_index.py @@ -1,2 +1,8 @@ from .view import View + +from errno import ENOENT +from stat import S_IFDIR + +from gitfs import FuseMethodNotImplemented, FuseOSError +from log import log @@ -4,2 +10,31 @@ class HistoryIndexView(View): - pass + + def getattr(self, path, fh=None): + ''' + Returns a dictionary with keys identical to the stat C structure of + stat(2). + + st_atime, st_mtime and st_ctime should be floats. + + NOTE: There is an incombatibility between Linux and Mac OS X + concerning st_nlink of directories. Mac OS X counts all files inside + the directory, while Linux counts only the subdirectories. + ''' + + if path != '/': + raise FuseOSError(ENOENT) + return dict(st_mode=(S_IFDIR | 0755), st_nlink=2) + + + def opendir(self, path): + return 0 + + def releasedir(self, path, fi): + pass + + def access(self, path, amode): + log.info('%s %s', path, amode) + return 0 + + def readdir(self, path, fh): + return ['.', '..', 'commits everywhere']
--- a/gitfs/views/history_index.py +++ b/gitfs/views/history_index.py @@ ... @@ from .view import View + +from errno import ENOENT +from stat import S_IFDIR + +from gitfs import FuseMethodNotImplemented, FuseOSError +from log import log @@ ... @@ class HistoryIndexView(View): - pass + + def getattr(self, path, fh=None): + ''' + Returns a dictionary with keys identical to the stat C structure of + stat(2). + + st_atime, st_mtime and st_ctime should be floats. + + NOTE: There is an incombatibility between Linux and Mac OS X + concerning st_nlink of directories. Mac OS X counts all files inside + the directory, while Linux counts only the subdirectories. + ''' + + if path != '/': + raise FuseOSError(ENOENT) + return dict(st_mode=(S_IFDIR | 0755), st_nlink=2) + + + def opendir(self, path): + return 0 + + def releasedir(self, path, fi): + pass + + def access(self, path, amode): + log.info('%s %s', path, amode) + return 0 + + def readdir(self, path, fh): + return ['.', '..', 'commits everywhere']
--- a/gitfs/views/history_index.py +++ b/gitfs/views/history_index.py @@ -1,2 +1,8 @@ CON from .view import View ADD ADD from errno import ENOENT ADD from stat import S_IFDIR ADD ADD from gitfs import FuseMethodNotImplemented, FuseOSError ADD from log import log CON @@ -4,2 +10,31 @@ CON class HistoryIndexView(View): DEL pass ADD ADD def getattr(self, path, fh=None): ADD ''' ADD Returns a dictionary with keys identical to the stat C structure of ADD stat(2). ADD ADD st_atime, st_mtime and st_ctime should be floats. ADD ADD NOTE: There is an incombatibility between Linux and Mac OS X ADD concerning st_nlink of directories. Mac OS X counts all files inside ADD the directory, while Linux counts only the subdirectories. ADD ''' ADD ADD if path != '/': ADD raise FuseOSError(ENOENT) ADD return dict(st_mode=(S_IFDIR | 0755), st_nlink=2) ADD ADD ADD def opendir(self, path): ADD return 0 ADD ADD def releasedir(self, path, fi): ADD pass ADD ADD def access(self, path, amode): ADD log.info('%s %s', path, amode) ADD return 0 ADD ADD def readdir(self, path, fh): ADD return ['.', '..', 'commits everywhere']
<<<<<<< SEARCH from .view import View class HistoryIndexView(View): pass ======= from .view import View from errno import ENOENT from stat import S_IFDIR from gitfs import FuseMethodNotImplemented, FuseOSError from log import log class HistoryIndexView(View): def getattr(self, path, fh=None): ''' Returns a dictionary with keys identical to the stat C structure of stat(2). st_atime, st_mtime and st_ctime should be floats. NOTE: There is an incombatibility between Linux and Mac OS X concerning st_nlink of directories. Mac OS X counts all files inside the directory, while Linux counts only the subdirectories. ''' if path != '/': raise FuseOSError(ENOENT) return dict(st_mode=(S_IFDIR | 0755), st_nlink=2) def opendir(self, path): return 0 def releasedir(self, path, fi): pass def access(self, path, amode): log.info('%s %s', path, amode) return 0 def readdir(self, path, fh): return ['.', '..', 'commits everywhere'] >>>>>>> REPLACE
GeotrekCE/Geotrek-admin
34db760c5b763ad2df02398d58ea417b47b785e7
geotrek/zoning/views.py
python
bsd-2-clause
Change cache land, use settings mapentity
from django.shortcuts import get_object_or_404 from django.views.decorators.cache import cache_page from django.conf import settings from django.utils.decorators import method_decorator from djgeojson.views import GeoJSONLayerView from .models import City, RestrictedArea, RestrictedAreaType, District class LandLayerMixin(object): srid = settings.API_SRID precision = settings.LAYER_PRECISION_LAND simplify = settings.LAYER_SIMPLIFY_LAND @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache="fat")) def dispatch(self, request, *args, **kwargs): return super(LandLayerMixin, self).dispatch(request, *args, **kwargs) class CityGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = City class RestrictedAreaGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = RestrictedArea class RestrictedAreaTypeGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = RestrictedArea def get_queryset(self): type_pk = self.kwargs['type_pk'] qs = super(RestrictedAreaTypeGeoJSONLayer, self).get_queryset() get_object_or_404(RestrictedAreaType, pk=type_pk) return qs.filter(area_type=type_pk) class DistrictGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = District properties = ['name']
from django.shortcuts import get_object_or_404 from django.views.decorators.cache import cache_page from django.conf import settings from django.utils.decorators import method_decorator from djgeojson.views import GeoJSONLayerView from .models import City, RestrictedArea, RestrictedAreaType, District class LandLayerMixin(object): srid = settings.API_SRID precision = settings.LAYER_PRECISION_LAND simplify = settings.LAYER_SIMPLIFY_LAND @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache=settings.MAPENTITY_CONFIG['GEOJSON_LAYERS_CACHE_BACKEND'])) def dispatch(self, request, *args, **kwargs): return super(LandLayerMixin, self).dispatch(request, *args, **kwargs) class CityGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = City class RestrictedAreaGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = RestrictedArea class RestrictedAreaTypeGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = RestrictedArea def get_queryset(self): type_pk = self.kwargs['type_pk'] qs = super(RestrictedAreaTypeGeoJSONLayer, self).get_queryset() get_object_or_404(RestrictedAreaType, pk=type_pk) return qs.filter(area_type=type_pk) class DistrictGeoJSONLayer(LandLayerMixin, GeoJSONLayerView): model = District properties = ['name']
2
1
1
mixed
--- a/geotrek/zoning/views.py +++ b/geotrek/zoning/views.py @@ -14,3 +14,4 @@ - @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache="fat")) + @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, + cache=settings.MAPENTITY_CONFIG['GEOJSON_LAYERS_CACHE_BACKEND'])) def dispatch(self, request, *args, **kwargs):
--- a/geotrek/zoning/views.py +++ b/geotrek/zoning/views.py @@ ... @@ - @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache="fat")) + @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, + cache=settings.MAPENTITY_CONFIG['GEOJSON_LAYERS_CACHE_BACKEND'])) def dispatch(self, request, *args, **kwargs):
--- a/geotrek/zoning/views.py +++ b/geotrek/zoning/views.py @@ -14,3 +14,4 @@ CON DEL @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache="fat")) ADD @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, ADD cache=settings.MAPENTITY_CONFIG['GEOJSON_LAYERS_CACHE_BACKEND'])) CON def dispatch(self, request, *args, **kwargs):
<<<<<<< SEARCH simplify = settings.LAYER_SIMPLIFY_LAND @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache="fat")) def dispatch(self, request, *args, **kwargs): return super(LandLayerMixin, self).dispatch(request, *args, **kwargs) ======= simplify = settings.LAYER_SIMPLIFY_LAND @method_decorator(cache_page(settings.CACHE_TIMEOUT_LAND_LAYERS, cache=settings.MAPENTITY_CONFIG['GEOJSON_LAYERS_CACHE_BACKEND'])) def dispatch(self, request, *args, **kwargs): return super(LandLayerMixin, self).dispatch(request, *args, **kwargs) >>>>>>> REPLACE
patriziosotgiu/sc5-styleguide
ab51d92adb61958580c00b219c96e50bed8b20c0
demo-gulpfile.js
javascript
mit
Fix typo in demo gulp file
var gulp = require('gulp'), styleguide = require('./lib/styleguide'), source = 'lib/app/**/*.scss', ouputPath = 'demo-output'; gulp.task('styleguide', ['static'], function() { return gulp.src(source) .pipe(styleguide({ title: 'SC5 Styleguide', server: true, rootPath: ouputPath, overviewPath: 'README.md', styleVariables: 'lib/app/sass/_styleguide_variables.scss', sass: { src: 'lib/app/sass/app.scss', includePaths: [ 'node_modules/node-bourbon/assets/stylesheets', 'node_modules/node-neat/assets/stylesheets' ] }, filesConfig: [ { name: 'sgAppTest', files: [ 'demo/testDirective.js' ], template: 'demo/testDirective.html' } ] })) .pipe(gulp.dest(ouputPath)); }); gulp.task('static', function() { gulp.src(['lib/demo/**']) .pipe(gulp.dest(ouputPath + '/demo')); }); gulp.task('watch', ['styleguide'], function() { // Start watching changes and update styleguide whenever changes are detected gulp.watch(source, ['styleguide']); });
var gulp = require('gulp'), styleguide = require('./lib/styleguide'), source = 'lib/app/**/*.scss', outputPath = 'demo-output'; gulp.task('styleguide', ['static'], function() { return gulp.src(source) .pipe(styleguide({ title: 'SC5 Styleguide', server: true, rootPath: outputPath, overviewPath: 'README.md', styleVariables: 'lib/app/sass/_styleguide_variables.scss', sass: { src: 'lib/app/sass/app.scss', includePaths: [ 'node_modules/node-bourbon/assets/stylesheets', 'node_modules/node-neat/assets/stylesheets' ] }, filesConfig: [ { name: 'sgAppTest', files: [ 'demo/testDirective.js' ], template: 'demo/testDirective.html' } ] })) .pipe(gulp.dest(outputPath)); }); gulp.task('static', function() { gulp.src(['lib/demo/**']) .pipe(gulp.dest(outputPath + '/demo')); }); gulp.task('watch', ['styleguide'], function() { // Start watching changes and update styleguide whenever changes are detected gulp.watch(source, ['styleguide']); });
4
4
4
mixed
--- a/demo-gulpfile.js +++ b/demo-gulpfile.js @@ -3,3 +3,3 @@ source = 'lib/app/**/*.scss', - ouputPath = 'demo-output'; + outputPath = 'demo-output'; @@ -10,3 +10,3 @@ server: true, - rootPath: ouputPath, + rootPath: outputPath, overviewPath: 'README.md', @@ -30,3 +30,3 @@ })) - .pipe(gulp.dest(ouputPath)); + .pipe(gulp.dest(outputPath)); }); @@ -35,3 +35,3 @@ gulp.src(['lib/demo/**']) - .pipe(gulp.dest(ouputPath + '/demo')); + .pipe(gulp.dest(outputPath + '/demo')); });
--- a/demo-gulpfile.js +++ b/demo-gulpfile.js @@ ... @@ source = 'lib/app/**/*.scss', - ouputPath = 'demo-output'; + outputPath = 'demo-output'; @@ ... @@ server: true, - rootPath: ouputPath, + rootPath: outputPath, overviewPath: 'README.md', @@ ... @@ })) - .pipe(gulp.dest(ouputPath)); + .pipe(gulp.dest(outputPath)); }); @@ ... @@ gulp.src(['lib/demo/**']) - .pipe(gulp.dest(ouputPath + '/demo')); + .pipe(gulp.dest(outputPath + '/demo')); });
--- a/demo-gulpfile.js +++ b/demo-gulpfile.js @@ -3,3 +3,3 @@ CON source = 'lib/app/**/*.scss', DEL ouputPath = 'demo-output'; ADD outputPath = 'demo-output'; CON @@ -10,3 +10,3 @@ CON server: true, DEL rootPath: ouputPath, ADD rootPath: outputPath, CON overviewPath: 'README.md', @@ -30,3 +30,3 @@ CON })) DEL .pipe(gulp.dest(ouputPath)); ADD .pipe(gulp.dest(outputPath)); CON }); @@ -35,3 +35,3 @@ CON gulp.src(['lib/demo/**']) DEL .pipe(gulp.dest(ouputPath + '/demo')); ADD .pipe(gulp.dest(outputPath + '/demo')); CON });
<<<<<<< SEARCH styleguide = require('./lib/styleguide'), source = 'lib/app/**/*.scss', ouputPath = 'demo-output'; gulp.task('styleguide', ['static'], function() { ======= styleguide = require('./lib/styleguide'), source = 'lib/app/**/*.scss', outputPath = 'demo-output'; gulp.task('styleguide', ['static'], function() { >>>>>>> REPLACE <<<<<<< SEARCH title: 'SC5 Styleguide', server: true, rootPath: ouputPath, overviewPath: 'README.md', styleVariables: 'lib/app/sass/_styleguide_variables.scss', ======= title: 'SC5 Styleguide', server: true, rootPath: outputPath, overviewPath: 'README.md', styleVariables: 'lib/app/sass/_styleguide_variables.scss', >>>>>>> REPLACE <<<<<<< SEARCH ] })) .pipe(gulp.dest(ouputPath)); }); gulp.task('static', function() { gulp.src(['lib/demo/**']) .pipe(gulp.dest(ouputPath + '/demo')); }); ======= ] })) .pipe(gulp.dest(outputPath)); }); gulp.task('static', function() { gulp.src(['lib/demo/**']) .pipe(gulp.dest(outputPath + '/demo')); }); >>>>>>> REPLACE
nuimk/nmk
2f7f8ebcab920c8455826c48bd19701bfb8bcbd1
nmk/src/nmk/platform.rs
rust
mit
Use Path to check file existence
use once_cell::sync::Lazy; #[allow(dead_code)] #[derive(PartialEq, Clone)] pub enum PlatformType { Unknown, OSX, Linux, Arch, Alpine, } pub fn is_alpine() -> bool { *PLATFORM == PlatformType::Alpine } pub fn is_arch() -> bool { *PLATFORM == PlatformType::Arch } pub fn is_mac() -> bool { *PLATFORM == PlatformType::OSX } static PLATFORM: Lazy<PlatformType> = Lazy::new(what_platform); #[cfg(target_os = "macos")] fn what_platform() -> PlatformType { PlatformType::OSX } #[cfg(target_os = "linux")] fn what_platform() -> PlatformType { use std::path::PathBuf; if PathBuf::from("/etc/alpine-release").exists() { PlatformType::Alpine } else if PathBuf::from("/etc/arch-release").exists() { PlatformType::Arch } else { PlatformType::Linux } } #[cfg(not(any(target_os = "linux", target_os = "macos")))] fn what_platform() -> PlatformType { PlatformType::Unknown }
use once_cell::sync::Lazy; #[allow(dead_code)] #[derive(PartialEq, Clone, Copy)] pub enum PlatformType { Unknown, OSX, Linux, Arch, Alpine, } pub fn is_alpine() -> bool { *PLATFORM == PlatformType::Alpine } pub fn is_arch() -> bool { *PLATFORM == PlatformType::Arch } pub fn is_mac() -> bool { *PLATFORM == PlatformType::OSX } static PLATFORM: Lazy<PlatformType> = Lazy::new(what_platform); #[cfg(target_os = "macos")] fn what_platform() -> PlatformType { PlatformType::OSX } #[cfg(target_os = "linux")] fn what_platform() -> PlatformType { let exists = |s: &str| std::path::Path::new(s).exists(); if exists("/etc/alpine-release") { PlatformType::Alpine } else if exists("/etc/arch-release") { PlatformType::Arch } else { PlatformType::Linux } } #[cfg(not(any(target_os = "linux", target_os = "macos")))] fn what_platform() -> PlatformType { PlatformType::Unknown }
4
4
2
mixed
--- a/nmk/src/nmk/platform.rs +++ b/nmk/src/nmk/platform.rs @@ -3,3 +3,3 @@ #[allow(dead_code)] -#[derive(PartialEq, Clone)] +#[derive(PartialEq, Clone, Copy)] pub enum PlatformType { @@ -33,6 +33,6 @@ fn what_platform() -> PlatformType { - use std::path::PathBuf; - if PathBuf::from("/etc/alpine-release").exists() { + let exists = |s: &str| std::path::Path::new(s).exists(); + if exists("/etc/alpine-release") { PlatformType::Alpine - } else if PathBuf::from("/etc/arch-release").exists() { + } else if exists("/etc/arch-release") { PlatformType::Arch
--- a/nmk/src/nmk/platform.rs +++ b/nmk/src/nmk/platform.rs @@ ... @@ #[allow(dead_code)] -#[derive(PartialEq, Clone)] +#[derive(PartialEq, Clone, Copy)] pub enum PlatformType { @@ ... @@ fn what_platform() -> PlatformType { - use std::path::PathBuf; - if PathBuf::from("/etc/alpine-release").exists() { + let exists = |s: &str| std::path::Path::new(s).exists(); + if exists("/etc/alpine-release") { PlatformType::Alpine - } else if PathBuf::from("/etc/arch-release").exists() { + } else if exists("/etc/arch-release") { PlatformType::Arch
--- a/nmk/src/nmk/platform.rs +++ b/nmk/src/nmk/platform.rs @@ -3,3 +3,3 @@ CON #[allow(dead_code)] DEL #[derive(PartialEq, Clone)] ADD #[derive(PartialEq, Clone, Copy)] CON pub enum PlatformType { @@ -33,6 +33,6 @@ CON fn what_platform() -> PlatformType { DEL use std::path::PathBuf; DEL if PathBuf::from("/etc/alpine-release").exists() { ADD let exists = |s: &str| std::path::Path::new(s).exists(); ADD if exists("/etc/alpine-release") { CON PlatformType::Alpine DEL } else if PathBuf::from("/etc/arch-release").exists() { ADD } else if exists("/etc/arch-release") { CON PlatformType::Arch
<<<<<<< SEARCH #[allow(dead_code)] #[derive(PartialEq, Clone)] pub enum PlatformType { Unknown, ======= #[allow(dead_code)] #[derive(PartialEq, Clone, Copy)] pub enum PlatformType { Unknown, >>>>>>> REPLACE <<<<<<< SEARCH #[cfg(target_os = "linux")] fn what_platform() -> PlatformType { use std::path::PathBuf; if PathBuf::from("/etc/alpine-release").exists() { PlatformType::Alpine } else if PathBuf::from("/etc/arch-release").exists() { PlatformType::Arch } else { ======= #[cfg(target_os = "linux")] fn what_platform() -> PlatformType { let exists = |s: &str| std::path::Path::new(s).exists(); if exists("/etc/alpine-release") { PlatformType::Alpine } else if exists("/etc/arch-release") { PlatformType::Arch } else { >>>>>>> REPLACE
fifth-postulate/finding-the-planets
b7e3aa877aac0a237c41c24614a7789a5f5efebc
rust-showcase/src/bin/brightness.rs
rust
mit
Remove background noise from signal
extern crate simple_csv; use std::fs::File; use std::io::BufReader; use std::str::FromStr; use simple_csv::{SimpleCsvWriter, SimpleCsvReader}; fn main(){ let f = File::open("../long-cadence.csv").unwrap(); let buf = BufReader::new(f); let reader = SimpleCsvReader::new(buf); let o = File::create("average.csv").unwrap(); let mut writer = SimpleCsvWriter::new(o); for (_, r) in reader.enumerate() { let row = r.unwrap(); let (time, brightness) = sum(row); writer.write( &vec!(time.to_string(), brightness.to_string()) ).unwrap(); } } fn sum(row: Vec<String>) -> (f64, f64) { let mut iter = row.iter(); let time: f64 = f64::from_str(iter.next().unwrap()).unwrap(); let raw: Vec<f64> = iter .map(|s| f64::from_str(s).unwrap()) .collect(); let sum: f64 = raw .iter() .fold(0f64, |acc, v| acc+v); (time, sum) }
extern crate simple_csv; use std::fs::File; use std::io::BufReader; use std::str::FromStr; use simple_csv::{SimpleCsvWriter, SimpleCsvReader}; fn main(){ let f = File::open("../long-cadence.csv").unwrap(); let buf = BufReader::new(f); let reader = SimpleCsvReader::new(buf); let o = File::create("average.csv").unwrap(); let mut writer = SimpleCsvWriter::new(o); for (_, r) in reader.enumerate() { let row = r.unwrap(); let (time, brightness, filtered) = sum(row); writer.write( &vec!(time.to_string(), brightness.to_string(), filtered.to_string()) ).unwrap(); } } fn sum(row: Vec<String>) -> (f64, f64, f64) { let mut iter = row.iter(); let time: f64 = f64::from_str(iter.next().unwrap()).unwrap(); let raw: Vec<f64> = iter .map(|s| f64::from_str(s).unwrap()) .collect(); let sum: f64 = raw .iter() .fold(0f64, |acc, v| acc+v); let average = sum / (row.len() as f64); let filtered: f64 = raw .iter() .filter(|&v| *v >= average) .fold(0f64, |acc, v| acc+v); (time, sum, filtered) }
9
6
2
mixed
--- a/rust-showcase/src/bin/brightness.rs +++ b/rust-showcase/src/bin/brightness.rs @@ -17,13 +17,11 @@ let row = r.unwrap(); - let (time, brightness) = sum(row); + let (time, brightness, filtered) = sum(row); writer.write( - &vec!(time.to_string(), brightness.to_string()) + &vec!(time.to_string(), brightness.to_string(), filtered.to_string()) ).unwrap(); } - - } -fn sum(row: Vec<String>) -> (f64, f64) { +fn sum(row: Vec<String>) -> (f64, f64, f64) { let mut iter = row.iter(); @@ -37,4 +35,9 @@ .fold(0f64, |acc, v| acc+v); + let average = sum / (row.len() as f64); + let filtered: f64 = raw + .iter() + .filter(|&v| *v >= average) + .fold(0f64, |acc, v| acc+v); - (time, sum) + (time, sum, filtered) }
--- a/rust-showcase/src/bin/brightness.rs +++ b/rust-showcase/src/bin/brightness.rs @@ ... @@ let row = r.unwrap(); - let (time, brightness) = sum(row); + let (time, brightness, filtered) = sum(row); writer.write( - &vec!(time.to_string(), brightness.to_string()) + &vec!(time.to_string(), brightness.to_string(), filtered.to_string()) ).unwrap(); } - - } -fn sum(row: Vec<String>) -> (f64, f64) { +fn sum(row: Vec<String>) -> (f64, f64, f64) { let mut iter = row.iter(); @@ ... @@ .fold(0f64, |acc, v| acc+v); + let average = sum / (row.len() as f64); + let filtered: f64 = raw + .iter() + .filter(|&v| *v >= average) + .fold(0f64, |acc, v| acc+v); - (time, sum) + (time, sum, filtered) }
--- a/rust-showcase/src/bin/brightness.rs +++ b/rust-showcase/src/bin/brightness.rs @@ -17,13 +17,11 @@ CON let row = r.unwrap(); DEL let (time, brightness) = sum(row); ADD let (time, brightness, filtered) = sum(row); CON CON writer.write( DEL &vec!(time.to_string(), brightness.to_string()) ADD &vec!(time.to_string(), brightness.to_string(), filtered.to_string()) CON ).unwrap(); CON } DEL DEL CON } CON DEL fn sum(row: Vec<String>) -> (f64, f64) { ADD fn sum(row: Vec<String>) -> (f64, f64, f64) { CON let mut iter = row.iter(); @@ -37,4 +35,9 @@ CON .fold(0f64, |acc, v| acc+v); ADD let average = sum / (row.len() as f64); ADD let filtered: f64 = raw ADD .iter() ADD .filter(|&v| *v >= average) ADD .fold(0f64, |acc, v| acc+v); CON DEL (time, sum) ADD (time, sum, filtered) CON }
<<<<<<< SEARCH for (_, r) in reader.enumerate() { let row = r.unwrap(); let (time, brightness) = sum(row); writer.write( &vec!(time.to_string(), brightness.to_string()) ).unwrap(); } } fn sum(row: Vec<String>) -> (f64, f64) { let mut iter = row.iter(); let time: f64 = f64::from_str(iter.next().unwrap()).unwrap(); ======= for (_, r) in reader.enumerate() { let row = r.unwrap(); let (time, brightness, filtered) = sum(row); writer.write( &vec!(time.to_string(), brightness.to_string(), filtered.to_string()) ).unwrap(); } } fn sum(row: Vec<String>) -> (f64, f64, f64) { let mut iter = row.iter(); let time: f64 = f64::from_str(iter.next().unwrap()).unwrap(); >>>>>>> REPLACE <<<<<<< SEARCH .iter() .fold(0f64, |acc, v| acc+v); (time, sum) } ======= .iter() .fold(0f64, |acc, v| acc+v); let average = sum / (row.len() as f64); let filtered: f64 = raw .iter() .filter(|&v| *v >= average) .fold(0f64, |acc, v| acc+v); (time, sum, filtered) } >>>>>>> REPLACE
stufisher/ispyb-php
6ac3041e342f15a38def80b0c0f967313a14a02c
templates/js/pages/protein_view.js
javascript
apache-2.0
Make sequence editor text area
$(function() { var dt = {sPaginationType: 'full_numbers', bProcessing: true, bServerSide: true, sAjaxSource: '/sample/ajax/pid/'+pid+'/', bAutoWidth:false , aaSorting: [[ 0, 'desc' ]], //fnDrawCallback: _map_callbacks(), fnServerData: function ( sSource, aoData, fnCallback ) { $.getJSON( sSource, aoData, function (json) { fnCallback(json) _map_callbacks() }) } } if ($(window).width() <= 600) dt = $.extend({ 'bScrollCollapse': true, 'sScrollX': '100%', }, dt) $('.robot_actions').dataTable(dt) function _map_callbacks() { //setTimeout(function() { $('a.view').button({ icons: { primary: 'ui-icon-search' } }) //}, 500) } $.each(['name', 'acronym', 'mass'], function(i,e) { $('.'+e).editable('/sample/ajax/updatep/pid/'+pid+'/ty/'+e+'/', { height: '100%', type: 'text', submit: 'Ok', style: 'display: inline', }).addClass('editable'); }) $('.seq').editable('/sample/ajax/updatep/pid/'+pid+'/ty/seq/', { type: 'text', height: '100%', width: '50%', submit: 'Ok', style: 'display: inline', }).addClass('editable'); })
$(function() { var dt = {sPaginationType: 'full_numbers', bProcessing: true, bServerSide: true, sAjaxSource: '/sample/ajax/pid/'+pid+'/', bAutoWidth:false , aaSorting: [[ 0, 'desc' ]], //fnDrawCallback: _map_callbacks(), fnServerData: function ( sSource, aoData, fnCallback ) { $.getJSON( sSource, aoData, function (json) { fnCallback(json) _map_callbacks() }) } } if ($(window).width() <= 600) dt = $.extend({ 'bScrollCollapse': true, 'sScrollX': '100%', }, dt) $('.robot_actions').dataTable(dt) function _map_callbacks() { //setTimeout(function() { $('a.view').button({ icons: { primary: 'ui-icon-search' } }) //}, 500) } $.each(['name', 'acronym', 'mass'], function(i,e) { $('.'+e).editable('/sample/ajax/updatep/pid/'+pid+'/ty/'+e+'/', { height: '100%', type: 'text', submit: 'Ok', style: 'display: inline', }).addClass('editable'); }) $('.seq').editable('/sample/ajax/updatep/pid/'+pid+'/ty/seq/', { type: 'textarea', rows: 5, width: '100%', submit: 'Ok', style: 'display: inline', }).addClass('editable'); })
4
3
1
mixed
--- a/templates/js/pages/protein_view.js +++ b/templates/js/pages/protein_view.js @@ -41,7 +41,8 @@ $('.seq').editable('/sample/ajax/updatep/pid/'+pid+'/ty/seq/', { - type: 'text', - height: '100%', - width: '50%', + type: 'textarea', + rows: 5, + width: '100%', submit: 'Ok', style: 'display: inline', + }).addClass('editable');
--- a/templates/js/pages/protein_view.js +++ b/templates/js/pages/protein_view.js @@ ... @@ $('.seq').editable('/sample/ajax/updatep/pid/'+pid+'/ty/seq/', { - type: 'text', - height: '100%', - width: '50%', + type: 'textarea', + rows: 5, + width: '100%', submit: 'Ok', style: 'display: inline', + }).addClass('editable');
--- a/templates/js/pages/protein_view.js +++ b/templates/js/pages/protein_view.js @@ -41,7 +41,8 @@ CON $('.seq').editable('/sample/ajax/updatep/pid/'+pid+'/ty/seq/', { DEL type: 'text', DEL height: '100%', DEL width: '50%', ADD type: 'textarea', ADD rows: 5, ADD width: '100%', CON submit: 'Ok', CON style: 'display: inline', ADD CON }).addClass('editable');
<<<<<<< SEARCH $('.seq').editable('/sample/ajax/updatep/pid/'+pid+'/ty/seq/', { type: 'text', height: '100%', width: '50%', submit: 'Ok', style: 'display: inline', }).addClass('editable'); }) ======= $('.seq').editable('/sample/ajax/updatep/pid/'+pid+'/ty/seq/', { type: 'textarea', rows: 5, width: '100%', submit: 'Ok', style: 'display: inline', }).addClass('editable'); }) >>>>>>> REPLACE
rtfd/readthedocs.org
29c437e15f7793886c80b71ca6764184caff2597
readthedocs/oauth/management/commands/load_project_remote_repo_relation.py
python
mit
Check if the remote_repo was updated or not and log error
import json

from django.core.management.base import BaseCommand

from readthedocs.oauth.models import RemoteRepository


class Command(BaseCommand):
    help = "Load Project and RemoteRepository Relationship from JSON file"

    def add_arguments(self, parser):
        # File path of the json file containing relationship data
        parser.add_argument(
            '--file',
            required=True,
            nargs=1,
            type=str,
            help='File path of the json file containing relationship data.',
        )

    def handle(self, *args, **options):
        file = options.get('file')[0]

        try:
            # Load data from the json file
            with open(file, 'r') as f:
                data = json.load(f)
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(
                    f'Exception occurred while trying to load the file "{file}". '
                    f'Exception: {e}.'
                )
            )
            return

        for item in data:
            try:
                RemoteRepository.objects.filter(
                    remote_id=item['remote_id']
                ).update(project_id=item['project_id'])

            except Exception as e:
                self.stdout.write(
                    self.style.ERROR(
                        f"Exception occurred while trying to update {item['slug']}'s "
                        f"relationship with {item['html_url']}, "
                        f"username: {item['username']}, Exception: {e}."
                    )
                )
import json

from django.core.management.base import BaseCommand

from readthedocs.oauth.models import RemoteRepository


class Command(BaseCommand):
    help = "Load Project and RemoteRepository Relationship from JSON file"

    def add_arguments(self, parser):
        # File path of the json file containing relationship data
        parser.add_argument(
            '--file',
            required=True,
            nargs=1,
            type=str,
            help='File path of the json file containing relationship data.',
        )

    def handle(self, *args, **options):
        file = options.get('file')[0]

        try:
            # Load data from the json file
            with open(file, 'r') as f:
                data = json.load(f)
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(
                    f'Exception occurred while trying to load the file "{file}". '
                    f'Exception: {e}.'
                )
            )
            return

        for item in data:
            try:
                update_count = RemoteRepository.objects.filter(
                    remote_id=item['remote_id']
                ).update(project_id=item['project_id'])

                if update_count < 1:
                    self.stdout.write(
                        self.style.ERROR(
                            f"Could not update {item['slug']}'s "
                            f"relationship with {item['html_url']}, "
                            f"remote_id {item['remote_id']}, "
                            f"username: {item['username']}."
                        )
                    )

            except Exception as e:
                self.stdout.write(
                    self.style.ERROR(
                        f"Exception occurred while trying to update {item['slug']}'s "
                        f"relationship with {item['html_url']}, "
                        f"username: {item['username']}, Exception: {e}."
                    )
                )
11
1
1
mixed
--- a/readthedocs/oauth/management/commands/load_project_remote_repo_relation.py +++ b/readthedocs/oauth/management/commands/load_project_remote_repo_relation.py @@ -38,5 +38,15 @@ try: - RemoteRepository.objects.filter( + update_count = RemoteRepository.objects.filter( remote_id=item['remote_id'] ).update(project_id=item['project_id']) + + if update_count < 1: + self.stdout.write( + self.style.ERROR( + f"Could not update {item['slug']}'s " + f"relationship with {item['html_url']}, " + f"remote_id {item['remote_id']}, " + f"username: {item['username']}." + ) + )
--- a/readthedocs/oauth/management/commands/load_project_remote_repo_relation.py +++ b/readthedocs/oauth/management/commands/load_project_remote_repo_relation.py @@ ... @@ try: - RemoteRepository.objects.filter( + update_count = RemoteRepository.objects.filter( remote_id=item['remote_id'] ).update(project_id=item['project_id']) + + if update_count < 1: + self.stdout.write( + self.style.ERROR( + f"Could not update {item['slug']}'s " + f"relationship with {item['html_url']}, " + f"remote_id {item['remote_id']}, " + f"username: {item['username']}." + ) + )
--- a/readthedocs/oauth/management/commands/load_project_remote_repo_relation.py +++ b/readthedocs/oauth/management/commands/load_project_remote_repo_relation.py @@ -38,5 +38,15 @@ CON try: DEL RemoteRepository.objects.filter( ADD update_count = RemoteRepository.objects.filter( CON remote_id=item['remote_id'] CON ).update(project_id=item['project_id']) ADD ADD if update_count < 1: ADD self.stdout.write( ADD self.style.ERROR( ADD f"Could not update {item['slug']}'s " ADD f"relationship with {item['html_url']}, " ADD f"remote_id {item['remote_id']}, " ADD f"username: {item['username']}." ADD ) ADD ) CON
<<<<<<< SEARCH for item in data: try: RemoteRepository.objects.filter( remote_id=item['remote_id'] ).update(project_id=item['project_id']) except Exception as e: ======= for item in data: try: update_count = RemoteRepository.objects.filter( remote_id=item['remote_id'] ).update(project_id=item['project_id']) if update_count < 1: self.stdout.write( self.style.ERROR( f"Could not update {item['slug']}'s " f"relationship with {item['html_url']}, " f"remote_id {item['remote_id']}, " f"username: {item['username']}." ) ) except Exception as e: >>>>>>> REPLACE
wordpress-mobile/AztecEditor-Android
19b00891816feccf9166dd958e63b65bb98444e6
aztec/src/main/kotlin/org/wordpress/aztec/plugins/ICommentHandler.kt
kotlin
mpl-2.0
Add documentation to the comment handler plugin
package org.wordpress.aztec.plugins import android.annotation.SuppressLint import android.content.Context import android.text.Editable import android.text.Spanned import android.text.style.CharacterStyle import org.wordpress.aztec.plugins.IAztecPlugin @SuppressLint("NewApi") interface ICommentHandler : IAztecPlugin { fun canHandle(span: CharacterStyle): Boolean { return true } fun shouldParseContent(): Boolean { return true } fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { return true } fun handleCommentSpanStart(out: StringBuilder, span: CharacterStyle) fun handleCommentSpanEnd(out: StringBuilder, span: CharacterStyle) }
package org.wordpress.aztec.plugins import android.annotation.SuppressLint import android.content.Context import android.text.Editable import android.text.Spanned import android.text.style.CharacterStyle import org.wordpress.aztec.plugins.IAztecPlugin /** * An interface for HTML comment processing plugins. */ @SuppressLint("NewApi") interface ICommentHandler : IAztecPlugin { /** * Determines, whether the content of a comment (the text) should be parsed/rendered by [org.wordpress.aztec.AztecParser] * * @return true if text should be parsed, false otherwise. */ fun shouldParseContent(): Boolean { return true } /** * A plugin handler used by [org.wordpress.aztec.Html] parser during HTML-to-span parsing. * * This method is called when a comment is encountered in HTML. * * @param text the content/text of the comment. * @param output the parsed output [Editable], used for span manipulation. * @param context the Android context. * @param nestingLevel the nesting level within the HTML DOM tree. * * @return true if this plugin handled the comment and no other handler should be called, false otherwise. */ fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { return true } /** * Determines, whether the plugin can handle a particular [span] type. * * This method is called by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. * * @return true for compatible spans, false otherwise. */ fun canHandle(span: CharacterStyle): Boolean { return true } /** * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. * * This method is called when the beginning of a compatible span is encountered. * * @param html the resulting HTML string output. * @param span the encountered span. */ fun handleCommentSpanStart(html: StringBuilder, span: CharacterStyle) /** * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. * * This method is called when the ending of a compatible span is encountered. * * @param html the resulting HTML string output. * @param span the encountered span. */ fun handleCommentSpanEnd(html: StringBuilder, span: CharacterStyle) }
52
9
2
mixed
--- a/aztec/src/main/kotlin/org/wordpress/aztec/plugins/ICommentHandler.kt +++ b/aztec/src/main/kotlin/org/wordpress/aztec/plugins/ICommentHandler.kt @@ -9,5 +9,39 @@ +/** + * An interface for HTML comment processing plugins. + */ @SuppressLint("NewApi") interface ICommentHandler : IAztecPlugin { + /** + * Determines, whether the content of a comment (the text) should be parsed/rendered by [org.wordpress.aztec.AztecParser] + * + * @return true if text should be parsed, false otherwise. + */ + fun shouldParseContent(): Boolean { + return true + } + /** + * A plugin handler used by [org.wordpress.aztec.Html] parser during HTML-to-span parsing. + * + * This method is called when a comment is encountered in HTML. + * + * @param text the content/text of the comment. + * @param output the parsed output [Editable], used for span manipulation. + * @param context the Android context. + * @param nestingLevel the nesting level within the HTML DOM tree. + * + * @return true if this plugin handled the comment and no other handler should be called, false otherwise. + */ + fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { + return true + } + + /** + * Determines, whether the plugin can handle a particular [span] type. + * + * This method is called by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. + * + * @return true for compatible spans, false otherwise. + */ fun canHandle(span: CharacterStyle): Boolean { @@ -16,12 +50,21 @@ - fun shouldParseContent(): Boolean { - return true - } + /** + * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. + * + * This method is called when the beginning of a compatible span is encountered. + * + * @param html the resulting HTML string output. + * @param span the encountered span. + */ + fun handleCommentSpanStart(html: StringBuilder, span: CharacterStyle) - fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { - return true - } - - fun handleCommentSpanStart(out: StringBuilder, span: CharacterStyle) - fun handleCommentSpanEnd(out: StringBuilder, span: CharacterStyle) + /** + * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. + * + * This method is called when the ending of a compatible span is encountered. + * + * @param html the resulting HTML string output. + * @param span the encountered span. + */ + fun handleCommentSpanEnd(html: StringBuilder, span: CharacterStyle) }
--- a/aztec/src/main/kotlin/org/wordpress/aztec/plugins/ICommentHandler.kt +++ b/aztec/src/main/kotlin/org/wordpress/aztec/plugins/ICommentHandler.kt @@ ... @@ +/** + * An interface for HTML comment processing plugins. + */ @SuppressLint("NewApi") interface ICommentHandler : IAztecPlugin { + /** + * Determines, whether the content of a comment (the text) should be parsed/rendered by [org.wordpress.aztec.AztecParser] + * + * @return true if text should be parsed, false otherwise. + */ + fun shouldParseContent(): Boolean { + return true + } + /** + * A plugin handler used by [org.wordpress.aztec.Html] parser during HTML-to-span parsing. + * + * This method is called when a comment is encountered in HTML. + * + * @param text the content/text of the comment. + * @param output the parsed output [Editable], used for span manipulation. + * @param context the Android context. + * @param nestingLevel the nesting level within the HTML DOM tree. + * + * @return true if this plugin handled the comment and no other handler should be called, false otherwise. + */ + fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { + return true + } + + /** + * Determines, whether the plugin can handle a particular [span] type. + * + * This method is called by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. + * + * @return true for compatible spans, false otherwise. + */ fun canHandle(span: CharacterStyle): Boolean { @@ ... @@ - fun shouldParseContent(): Boolean { - return true - } + /** + * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. + * + * This method is called when the beginning of a compatible span is encountered. + * + * @param html the resulting HTML string output. + * @param span the encountered span. + */ + fun handleCommentSpanStart(html: StringBuilder, span: CharacterStyle) - fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { - return true - } - - fun handleCommentSpanStart(out: StringBuilder, span: CharacterStyle) - fun handleCommentSpanEnd(out: StringBuilder, span: CharacterStyle) + /** + * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. + * + * This method is called when the ending of a compatible span is encountered. + * + * @param html the resulting HTML string output. + * @param span the encountered span. + */ + fun handleCommentSpanEnd(html: StringBuilder, span: CharacterStyle) }
--- a/aztec/src/main/kotlin/org/wordpress/aztec/plugins/ICommentHandler.kt +++ b/aztec/src/main/kotlin/org/wordpress/aztec/plugins/ICommentHandler.kt @@ -9,5 +9,39 @@ CON ADD /** ADD * An interface for HTML comment processing plugins. ADD */ CON @SuppressLint("NewApi") CON interface ICommentHandler : IAztecPlugin { ADD /** ADD * Determines, whether the content of a comment (the text) should be parsed/rendered by [org.wordpress.aztec.AztecParser] ADD * ADD * @return true if text should be parsed, false otherwise. ADD */ ADD fun shouldParseContent(): Boolean { ADD return true ADD } CON ADD /** ADD * A plugin handler used by [org.wordpress.aztec.Html] parser during HTML-to-span parsing. ADD * ADD * This method is called when a comment is encountered in HTML. ADD * ADD * @param text the content/text of the comment. ADD * @param output the parsed output [Editable], used for span manipulation. ADD * @param context the Android context. ADD * @param nestingLevel the nesting level within the HTML DOM tree. ADD * ADD * @return true if this plugin handled the comment and no other handler should be called, false otherwise. ADD */ ADD fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { ADD return true ADD } ADD ADD /** ADD * Determines, whether the plugin can handle a particular [span] type. ADD * ADD * This method is called by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. ADD * ADD * @return true for compatible spans, false otherwise. ADD */ CON fun canHandle(span: CharacterStyle): Boolean { @@ -16,12 +50,21 @@ CON DEL fun shouldParseContent(): Boolean { DEL return true DEL } ADD /** ADD * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. ADD * ADD * This method is called when the beginning of a compatible span is encountered. ADD * ADD * @param html the resulting HTML string output. ADD * @param span the encountered span. ADD */ ADD fun handleCommentSpanStart(html: StringBuilder, span: CharacterStyle) CON DEL fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { DEL return true DEL } DEL DEL fun handleCommentSpanStart(out: StringBuilder, span: CharacterStyle) DEL fun handleCommentSpanEnd(out: StringBuilder, span: CharacterStyle) ADD /** ADD * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. ADD * ADD * This method is called when the ending of a compatible span is encountered. ADD * ADD * @param html the resulting HTML string output. ADD * @param span the encountered span. ADD */ ADD fun handleCommentSpanEnd(html: StringBuilder, span: CharacterStyle) CON }
<<<<<<< SEARCH import org.wordpress.aztec.plugins.IAztecPlugin @SuppressLint("NewApi") interface ICommentHandler : IAztecPlugin { fun canHandle(span: CharacterStyle): Boolean { return true } fun shouldParseContent(): Boolean { return true } fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { return true } fun handleCommentSpanStart(out: StringBuilder, span: CharacterStyle) fun handleCommentSpanEnd(out: StringBuilder, span: CharacterStyle) } ======= import org.wordpress.aztec.plugins.IAztecPlugin /** * An interface for HTML comment processing plugins. */ @SuppressLint("NewApi") interface ICommentHandler : IAztecPlugin { /** * Determines, whether the content of a comment (the text) should be parsed/rendered by [org.wordpress.aztec.AztecParser] * * @return true if text should be parsed, false otherwise. */ fun shouldParseContent(): Boolean { return true } /** * A plugin handler used by [org.wordpress.aztec.Html] parser during HTML-to-span parsing. * * This method is called when a comment is encountered in HTML. * * @param text the content/text of the comment. * @param output the parsed output [Editable], used for span manipulation. * @param context the Android context. * @param nestingLevel the nesting level within the HTML DOM tree. * * @return true if this plugin handled the comment and no other handler should be called, false otherwise. */ fun handleCommentHtml(text: String, output: Editable, context: Context, nestingLevel: Int) : Boolean { return true } /** * Determines, whether the plugin can handle a particular [span] type. * * This method is called by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. * * @return true for compatible spans, false otherwise. */ fun canHandle(span: CharacterStyle): Boolean { return true } /** * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. * * This method is called when the beginning of a compatible span is encountered. * * @param html the resulting HTML string output. * @param span the encountered span. */ fun handleCommentSpanStart(html: StringBuilder, span: CharacterStyle) /** * A plugin handler used by [org.wordpress.aztec.AztecParser] during span-to-HTML parsing. * * This method is called when the ending of a compatible span is encountered. * * @param html the resulting HTML string output. * @param span the encountered span. */ fun handleCommentSpanEnd(html: StringBuilder, span: CharacterStyle) } >>>>>>> REPLACE
jguerinet/MyMartlet
7317493493429f8f8ee893a320766bdcf3ad2217
parser/src/test/kotlin/com/guerinet/mymartlet/parser/GeneralTest.kt
kotlin
apache-2.0
Use NoSuchElementException to capture missing char element
/*
 * Copyright 2014-2019 Julien Guerinet
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.guerinet.mymartlet.parser

import org.threeten.bp.DayOfWeek
import kotlin.test.Test
import kotlin.test.fail

/**
 * Tests for parser.General.kt
 *
 * @author Allan Wang
 * @since 2.3.2
 */
class GeneralTest {

    /**
     * While not all characters have an associated day,
     * all [DayOfWeek] should have an associated character
     */
    @Test
    fun `all DayOfWeek enums accounted for`() {
        DayOfWeek.values().forEach {
            try {
                DayUtils.dayToChar(it)
            } catch (e: Exception) {
                fail("Could not get associated char for ${it.name}")
            }
        }
    }
}
/*
 * Copyright 2014-2019 Julien Guerinet
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.guerinet.mymartlet.parser

import org.threeten.bp.DayOfWeek
import kotlin.test.Test
import kotlin.test.fail

/**
 * Tests for parser.General.kt
 *
 * @author Allan Wang
 * @since 2.3.2
 */
class GeneralTest {

    /**
     * While not all characters have an associated day,
     * all [DayOfWeek] should have an associated character
     */
    @Test
    fun `all DayOfWeek enums accounted for`() {
        DayOfWeek.values().forEach {
            try {
                DayUtils.dayToChar(it)
            } catch (e: NoSuchElementException) {
                fail("Could not get associated char for ${it.name}")
            }
        }
    }
}
1
1
1
mixed
--- a/parser/src/test/kotlin/com/guerinet/mymartlet/parser/GeneralTest.kt
+++ b/parser/src/test/kotlin/com/guerinet/mymartlet/parser/GeneralTest.kt
@@ -39,3 +39,3 @@
                 DayUtils.dayToChar(it)
-            } catch (e: Exception) {
+            } catch (e: NoSuchElementException) {
                 fail("Could not get associated char for ${it.name}")
--- a/parser/src/test/kotlin/com/guerinet/mymartlet/parser/GeneralTest.kt
+++ b/parser/src/test/kotlin/com/guerinet/mymartlet/parser/GeneralTest.kt
@@ ... @@
                 DayUtils.dayToChar(it)
-            } catch (e: Exception) {
+            } catch (e: NoSuchElementException) {
                 fail("Could not get associated char for ${it.name}")
--- a/parser/src/test/kotlin/com/guerinet/mymartlet/parser/GeneralTest.kt
+++ b/parser/src/test/kotlin/com/guerinet/mymartlet/parser/GeneralTest.kt
@@ -39,3 +39,3 @@
CON                 DayUtils.dayToChar(it)
DEL             } catch (e: Exception) {
ADD             } catch (e: NoSuchElementException) {
CON                 fail("Could not get associated char for ${it.name}")
<<<<<<< SEARCH
            try {
                DayUtils.dayToChar(it)
            } catch (e: Exception) {
                fail("Could not get associated char for ${it.name}")
            }
=======
            try {
                DayUtils.dayToChar(it)
            } catch (e: NoSuchElementException) {
                fail("Could not get associated char for ${it.name}")
            }
>>>>>>> REPLACE
alexstyl/Memento-Namedays
a27cab3feef72a46521e4521b3c2d5666312e731
android_mobile/src/main/java/com/alexstyl/specialdates/upcoming/UpcomingEventsPresenter.kt
kotlin
mit
Move const to companion object
package com.alexstyl.specialdates.upcoming import com.alexstyl.specialdates.date.Date import com.alexstyl.specialdates.date.TimePeriod import com.alexstyl.specialdates.permissions.ContactPermissionRequest import io.reactivex.Scheduler import io.reactivex.disposables.Disposable import io.reactivex.subjects.PublishSubject internal class UpcomingEventsPresenter(private val firstDay: Date, private val permissions: ContactPermissionRequest, private val provider: IUpcomingEventsProvider, private val workScheduler: Scheduler, private val resultScheduler: Scheduler) { private val TRIGGER = 1 private val subject = PublishSubject.create<Int>() private var disposable: Disposable? = null fun startPresentingInto(view: UpcomingListMVPView) { disposable = subject .doOnSubscribe { if (view.isEmpty) { view.showLoading() } } .observeOn(workScheduler) .map { provider.calculateEventsBetween(TimePeriod.aYearFrom(firstDay)) } .observeOn(resultScheduler) .subscribe { upcomingRowViewModels -> view.display(upcomingRowViewModels) } if (permissions.permissionIsPresent()) { refreshEvents() } else { view.askForContactPermission() } } fun refreshEvents() { subject.onNext(TRIGGER) } fun stopPresenting() { disposable?.dispose() } }
package com.alexstyl.specialdates.upcoming import com.alexstyl.specialdates.date.Date import com.alexstyl.specialdates.date.TimePeriod import com.alexstyl.specialdates.permissions.ContactPermissionRequest import io.reactivex.Scheduler import io.reactivex.disposables.Disposable import io.reactivex.subjects.PublishSubject internal class UpcomingEventsPresenter(private val firstDay: Date, private val permissions: ContactPermissionRequest, private val provider: IUpcomingEventsProvider, private val workScheduler: Scheduler, private val resultScheduler: Scheduler) { companion object { private const val TRIGGER = 1 } private val subject = PublishSubject.create<Int>() private var disposable: Disposable? = null fun startPresentingInto(view: UpcomingListMVPView) { disposable = subject .doOnSubscribe { if (view.isEmpty) { view.showLoading() } } .observeOn(workScheduler) .map { provider.calculateEventsBetween(TimePeriod.aYearFrom(firstDay)) } .observeOn(resultScheduler) .subscribe { upcomingRowViewModels -> view.display(upcomingRowViewModels) } if (permissions.permissionIsPresent()) { refreshEvents() } else { view.askForContactPermission() } } fun refreshEvents() { subject.onNext(TRIGGER) } fun stopPresenting() { disposable?.dispose() } }
5
1
1
mixed
--- a/android_mobile/src/main/java/com/alexstyl/specialdates/upcoming/UpcomingEventsPresenter.kt +++ b/android_mobile/src/main/java/com/alexstyl/specialdates/upcoming/UpcomingEventsPresenter.kt @@ -15,3 +15,7 @@ - private val TRIGGER = 1 + + companion object { + private const val TRIGGER = 1 + } + private val subject = PublishSubject.create<Int>()
--- a/android_mobile/src/main/java/com/alexstyl/specialdates/upcoming/UpcomingEventsPresenter.kt +++ b/android_mobile/src/main/java/com/alexstyl/specialdates/upcoming/UpcomingEventsPresenter.kt @@ ... @@ - private val TRIGGER = 1 + + companion object { + private const val TRIGGER = 1 + } + private val subject = PublishSubject.create<Int>()
--- a/android_mobile/src/main/java/com/alexstyl/specialdates/upcoming/UpcomingEventsPresenter.kt +++ b/android_mobile/src/main/java/com/alexstyl/specialdates/upcoming/UpcomingEventsPresenter.kt @@ -15,3 +15,7 @@ CON DEL private val TRIGGER = 1 ADD ADD companion object { ADD private const val TRIGGER = 1 ADD } ADD CON private val subject = PublishSubject.create<Int>()
<<<<<<< SEARCH private val resultScheduler: Scheduler) { private val TRIGGER = 1 private val subject = PublishSubject.create<Int>() private var disposable: Disposable? = null ======= private val resultScheduler: Scheduler) { companion object { private const val TRIGGER = 1 } private val subject = PublishSubject.create<Int>() private var disposable: Disposable? = null >>>>>>> REPLACE
diesel-rs/diesel
a19f7126b5452a9185da747b960225863b94f733
yaqb/src/query_builder/update_statement/changeset.rs
rust
apache-2.0
Implement `Changeset` for `Vec<T>` and `Box<T>` A valid use case has arisen in #26 that is impossible to work around at the moment. We're going to eventually add a standard API to handle this, so this implementation has no tests. It will get tested through that API, once we've figured out what the API will look like. But I want to get this through so it's possible to work around this by implementing `AsChangeset` manually.
use query_builder::{QueryBuilder, BuildQueryResult}; use query_source::QuerySource; pub trait AsChangeset { type Changeset: Changeset; fn as_changeset(self) -> Self::Changeset; } pub trait Changeset { type Target: QuerySource; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult; } impl<T> AsChangeset for T where T: Changeset, { type Changeset = Self; fn as_changeset(self) -> Self::Changeset { self } }
use query_builder::{QueryBuilder, BuildQueryResult}; use query_source::QuerySource; pub trait AsChangeset { type Changeset: Changeset; fn as_changeset(self) -> Self::Changeset; } pub trait Changeset { type Target: QuerySource; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult; } impl<T> AsChangeset for T where T: Changeset, { type Changeset = Self; fn as_changeset(self) -> Self::Changeset { self } } impl<T: Changeset> Changeset for Vec<T> { type Target = T::Target; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { for (i, changeset) in self.iter().enumerate() { if i != 0 { out.push_sql(", "); } try!(changeset.to_sql(out)) } Ok(()) } } impl<T: Changeset> Changeset for Box<T> { type Target = T::Target; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { (&**self).to_sql(out) } }
22
0
1
add_only
--- a/yaqb/src/query_builder/update_statement/changeset.rs +++ b/yaqb/src/query_builder/update_statement/changeset.rs @@ -24 +24,23 @@ } + +impl<T: Changeset> Changeset for Vec<T> { + type Target = T::Target; + + fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { + for (i, changeset) in self.iter().enumerate() { + if i != 0 { + out.push_sql(", "); + } + try!(changeset.to_sql(out)) + } + Ok(()) + } +} + +impl<T: Changeset> Changeset for Box<T> { + type Target = T::Target; + + fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { + (&**self).to_sql(out) + } +}
--- a/yaqb/src/query_builder/update_statement/changeset.rs +++ b/yaqb/src/query_builder/update_statement/changeset.rs @@ ... @@ } + +impl<T: Changeset> Changeset for Vec<T> { + type Target = T::Target; + + fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { + for (i, changeset) in self.iter().enumerate() { + if i != 0 { + out.push_sql(", "); + } + try!(changeset.to_sql(out)) + } + Ok(()) + } +} + +impl<T: Changeset> Changeset for Box<T> { + type Target = T::Target; + + fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { + (&**self).to_sql(out) + } +}
--- a/yaqb/src/query_builder/update_statement/changeset.rs +++ b/yaqb/src/query_builder/update_statement/changeset.rs @@ -24 +24,23 @@ CON } ADD ADD impl<T: Changeset> Changeset for Vec<T> { ADD type Target = T::Target; ADD ADD fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { ADD for (i, changeset) in self.iter().enumerate() { ADD if i != 0 { ADD out.push_sql(", "); ADD } ADD try!(changeset.to_sql(out)) ADD } ADD Ok(()) ADD } ADD } ADD ADD impl<T: Changeset> Changeset for Box<T> { ADD type Target = T::Target; ADD ADD fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { ADD (&**self).to_sql(out) ADD } ADD }
<<<<<<< SEARCH } } ======= } } impl<T: Changeset> Changeset for Vec<T> { type Target = T::Target; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { for (i, changeset) in self.iter().enumerate() { if i != 0 { out.push_sql(", "); } try!(changeset.to_sql(out)) } Ok(()) } } impl<T: Changeset> Changeset for Box<T> { type Target = T::Target; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { (&**self).to_sql(out) } } >>>>>>> REPLACE
kokarn/KokBot
c48c444a0c6cf22fcafc12310881c9851f05cfcc
Urlchecker.botplug.js
javascript
mit
Add user agent so we don't get bounced by sites requiring it (e.g. SoundCloud)
'use strict'; var request = require( 'request' ), cheerio = require( 'cheerio' ), urlChecker = { bot : false, setup : function( bot ){ var _this = this, channel; _this.bot = bot; for( channel in _this.bot.opt.channels ){ if( _this.bot.opt.channels.hasOwnProperty( channel ) ){ bot.addListener( 'message' + this.bot.opt.channels[ channel ], _this.handleMessage ); } } }, handleMessage : function( from, text, message ){ var url, urlRegex = /(https?:\/\/[^\s]+)/g; url = text.match( urlRegex ); if( url !== null ){ request( url[ 0 ], function( error, response, html ){ var $ = cheerio.load( html ), pageTitle; if( error ){ console.log( error ); } pageTitle = $( 'title' ).text(); urlChecker.sendMessage( message.args[ 0 ], pageTitle ); }); } }, sendMessage : function( channel, message ){ this.bot.say( channel, message.trim() ); } }; module.exports = urlChecker;
'use strict'; var request = require( 'request' ), cheerio = require( 'cheerio' ), urlChecker = { bot : false, setup : function( bot ){ var _this = this, channel; _this.bot = bot; for( channel in _this.bot.opt.channels ){ if( _this.bot.opt.channels.hasOwnProperty( channel ) ){ bot.addListener( 'message' + this.bot.opt.channels[ channel ], _this.handleMessage ); } } }, handleMessage : function( from, text, message ){ var url, urlRegex = /(https?:\/\/[^\s]+)/g, options = { headers: { 'User-Agent': 'request' } }; url = text.match( urlRegex ); if( url !== null ){ options.url = url[ 0 ]; request( options, function( error, response, html ){ var $ = cheerio.load( html ), pageTitle; if( error ){ console.log( error ); } pageTitle = $( 'title' ).text(); urlChecker.sendMessage( message.args[ 0 ], pageTitle ); }); } }, sendMessage : function( channel, message ){ this.bot.say( channel, message.trim() ); } }; module.exports = urlChecker;
8
2
2
mixed
--- a/Urlchecker.botplug.js +++ b/Urlchecker.botplug.js @@ -19,3 +19,8 @@ var url, - urlRegex = /(https?:\/\/[^\s]+)/g; + urlRegex = /(https?:\/\/[^\s]+)/g, + options = { + headers: { + 'User-Agent': 'request' + } + }; @@ -24,3 +29,4 @@ if( url !== null ){ - request( url[ 0 ], function( error, response, html ){ + options.url = url[ 0 ]; + request( options, function( error, response, html ){ var $ = cheerio.load( html ),
--- a/Urlchecker.botplug.js +++ b/Urlchecker.botplug.js @@ ... @@ var url, - urlRegex = /(https?:\/\/[^\s]+)/g; + urlRegex = /(https?:\/\/[^\s]+)/g, + options = { + headers: { + 'User-Agent': 'request' + } + }; @@ ... @@ if( url !== null ){ - request( url[ 0 ], function( error, response, html ){ + options.url = url[ 0 ]; + request( options, function( error, response, html ){ var $ = cheerio.load( html ),
--- a/Urlchecker.botplug.js +++ b/Urlchecker.botplug.js @@ -19,3 +19,8 @@ CON var url, DEL urlRegex = /(https?:\/\/[^\s]+)/g; ADD urlRegex = /(https?:\/\/[^\s]+)/g, ADD options = { ADD headers: { ADD 'User-Agent': 'request' ADD } ADD }; CON @@ -24,3 +29,4 @@ CON if( url !== null ){ DEL request( url[ 0 ], function( error, response, html ){ ADD options.url = url[ 0 ]; ADD request( options, function( error, response, html ){ CON var $ = cheerio.load( html ),
<<<<<<< SEARCH handleMessage : function( from, text, message ){ var url, urlRegex = /(https?:\/\/[^\s]+)/g; url = text.match( urlRegex ); if( url !== null ){ request( url[ 0 ], function( error, response, html ){ var $ = cheerio.load( html ), pageTitle; ======= handleMessage : function( from, text, message ){ var url, urlRegex = /(https?:\/\/[^\s]+)/g, options = { headers: { 'User-Agent': 'request' } }; url = text.match( urlRegex ); if( url !== null ){ options.url = url[ 0 ]; request( options, function( error, response, html ){ var $ = cheerio.load( html ), pageTitle; >>>>>>> REPLACE
chrisbanes/tivi
1a5abf19878cf8430d932ff1c581f0f2184a594c
app/src/main/java/app/tivi/ui/widget/TintingToolbar.kt
kotlin
apache-2.0
Fix Toolbar tinting animation on release build
/* * Copyright 2018 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package app.tivi.ui.widget import android.content.Context import android.support.v7.widget.Toolbar import android.util.AttributeSet import app.tivi.R import app.tivi.extensions.resolveColor class TintingToolbar @JvmOverloads constructor( context: Context, attrs: AttributeSet? = null, defStyleAttr: Int = R.attr.toolbarStyle ) : Toolbar(context, attrs, defStyleAttr) { var iconTint: Int = context.theme.resolveColor(android.R.attr.colorControlNormal) set(value) { navigationIcon = navigationIcon?.let { it.setTint(value) it.mutate() } overflowIcon = overflowIcon?.let { it.setTint(value) it.mutate() } } }
/* * Copyright 2018 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package app.tivi.ui.widget import android.content.Context import android.support.annotation.Keep import android.support.v7.widget.Toolbar import android.util.AttributeSet import app.tivi.R import app.tivi.extensions.resolveColor class TintingToolbar @JvmOverloads constructor( context: Context, attrs: AttributeSet? = null, defStyleAttr: Int = R.attr.toolbarStyle ) : Toolbar(context, attrs, defStyleAttr) { @get:Keep @set:Keep var iconTint: Int = context.theme.resolveColor(android.R.attr.colorControlNormal) set(value) { if (value != field) { navigationIcon = navigationIcon?.let { it.setTint(value) it.mutate() } overflowIcon = overflowIcon?.let { it.setTint(value) it.mutate() } } field = value } }
13
8
2
mixed
--- a/app/src/main/java/app/tivi/ui/widget/TintingToolbar.kt +++ b/app/src/main/java/app/tivi/ui/widget/TintingToolbar.kt @@ -19,2 +19,3 @@ import android.content.Context +import android.support.annotation.Keep import android.support.v7.widget.Toolbar @@ -29,13 +30,17 @@ ) : Toolbar(context, attrs, defStyleAttr) { - + @get:Keep + @set:Keep var iconTint: Int = context.theme.resolveColor(android.R.attr.colorControlNormal) set(value) { - navigationIcon = navigationIcon?.let { - it.setTint(value) - it.mutate() + if (value != field) { + navigationIcon = navigationIcon?.let { + it.setTint(value) + it.mutate() + } + overflowIcon = overflowIcon?.let { + it.setTint(value) + it.mutate() + } } - overflowIcon = overflowIcon?.let { - it.setTint(value) - it.mutate() - } + field = value }
--- a/app/src/main/java/app/tivi/ui/widget/TintingToolbar.kt +++ b/app/src/main/java/app/tivi/ui/widget/TintingToolbar.kt @@ ... @@ import android.content.Context +import android.support.annotation.Keep import android.support.v7.widget.Toolbar @@ ... @@ ) : Toolbar(context, attrs, defStyleAttr) { - + @get:Keep + @set:Keep var iconTint: Int = context.theme.resolveColor(android.R.attr.colorControlNormal) set(value) { - navigationIcon = navigationIcon?.let { - it.setTint(value) - it.mutate() + if (value != field) { + navigationIcon = navigationIcon?.let { + it.setTint(value) + it.mutate() + } + overflowIcon = overflowIcon?.let { + it.setTint(value) + it.mutate() + } } - overflowIcon = overflowIcon?.let { - it.setTint(value) - it.mutate() - } + field = value }
--- a/app/src/main/java/app/tivi/ui/widget/TintingToolbar.kt +++ b/app/src/main/java/app/tivi/ui/widget/TintingToolbar.kt @@ -19,2 +19,3 @@ CON import android.content.Context ADD import android.support.annotation.Keep CON import android.support.v7.widget.Toolbar @@ -29,13 +30,17 @@ CON ) : Toolbar(context, attrs, defStyleAttr) { DEL ADD @get:Keep ADD @set:Keep CON var iconTint: Int = context.theme.resolveColor(android.R.attr.colorControlNormal) CON set(value) { DEL navigationIcon = navigationIcon?.let { DEL it.setTint(value) DEL it.mutate() ADD if (value != field) { ADD navigationIcon = navigationIcon?.let { ADD it.setTint(value) ADD it.mutate() ADD } ADD overflowIcon = overflowIcon?.let { ADD it.setTint(value) ADD it.mutate() ADD } CON } DEL overflowIcon = overflowIcon?.let { DEL it.setTint(value) DEL it.mutate() DEL } ADD field = value CON }
<<<<<<< SEARCH import android.content.Context import android.support.v7.widget.Toolbar import android.util.AttributeSet ======= import android.content.Context import android.support.annotation.Keep import android.support.v7.widget.Toolbar import android.util.AttributeSet >>>>>>> REPLACE <<<<<<< SEARCH defStyleAttr: Int = R.attr.toolbarStyle ) : Toolbar(context, attrs, defStyleAttr) { var iconTint: Int = context.theme.resolveColor(android.R.attr.colorControlNormal) set(value) { navigationIcon = navigationIcon?.let { it.setTint(value) it.mutate() } overflowIcon = overflowIcon?.let { it.setTint(value) it.mutate() } } } ======= defStyleAttr: Int = R.attr.toolbarStyle ) : Toolbar(context, attrs, defStyleAttr) { @get:Keep @set:Keep var iconTint: Int = context.theme.resolveColor(android.R.attr.colorControlNormal) set(value) { if (value != field) { navigationIcon = navigationIcon?.let { it.setTint(value) it.mutate() } overflowIcon = overflowIcon?.let { it.setTint(value) it.mutate() } } field = value } } >>>>>>> REPLACE
hfiguiere/exempi-rs
4f7f4b8023325efeb76ac3172c349fc47d7cf84b
src/xmpstring.rs
rust
bsd-2-clause
Implement Eq trait for XmpString
use ::c; use std::str; use std::ffi::{CStr}; pub struct XmpString { ptr: *mut c::XmpString } impl XmpString { pub fn new() -> XmpString { XmpString { ptr: unsafe { c::xmp_string_new() } } } pub fn is_null(&self) -> bool { self.ptr.is_null() } pub fn as_ptr(&self) -> *const c::XmpString { self.ptr } pub fn as_mut_ptr(&mut self) -> *mut c::XmpString { self.ptr } // XXX properly deal with the utf8 error pub fn to_str(&self) -> &str { unsafe { let s = CStr::from_ptr(c::xmp_string_cstr(self.ptr)); // we are supposed to receive UTF8 from the library. str::from_utf8_unchecked(s.to_bytes()) } } } impl Drop for XmpString { fn drop(&mut self) { if !self.is_null() { unsafe { c::xmp_string_free(self.ptr) }; } } }
use ::c; use std::str; use std::ffi::{CStr}; pub struct XmpString { ptr: *mut c::XmpString } impl XmpString { pub fn new() -> XmpString { XmpString { ptr: unsafe { c::xmp_string_new() } } } pub fn is_null(&self) -> bool { self.ptr.is_null() } pub fn as_ptr(&self) -> *const c::XmpString { self.ptr } pub fn as_mut_ptr(&mut self) -> *mut c::XmpString { self.ptr } // XXX properly deal with the utf8 error pub fn to_str(&self) -> &str { unsafe { let s = CStr::from_ptr(c::xmp_string_cstr(self.ptr)); // we are supposed to receive UTF8 from the library. str::from_utf8_unchecked(s.to_bytes()) } } } impl Drop for XmpString { fn drop(&mut self) { if !self.is_null() { unsafe { c::xmp_string_free(self.ptr) }; } } } impl Eq for XmpString { } impl PartialEq for XmpString { fn eq(&self, other: &XmpString) -> bool { self.to_str() == other.to_str() } }
9
0
1
add_only
--- a/src/xmpstring.rs +++ b/src/xmpstring.rs @@ -43 +43,10 @@ } + +impl Eq for XmpString { + +} +impl PartialEq for XmpString { + fn eq(&self, other: &XmpString) -> bool { + self.to_str() == other.to_str() + } +}
--- a/src/xmpstring.rs +++ b/src/xmpstring.rs @@ ... @@ } + +impl Eq for XmpString { + +} +impl PartialEq for XmpString { + fn eq(&self, other: &XmpString) -> bool { + self.to_str() == other.to_str() + } +}
--- a/src/xmpstring.rs +++ b/src/xmpstring.rs @@ -43 +43,10 @@ CON } ADD ADD impl Eq for XmpString { ADD ADD } ADD impl PartialEq for XmpString { ADD fn eq(&self, other: &XmpString) -> bool { ADD self.to_str() == other.to_str() ADD } ADD }
<<<<<<< SEARCH } } ======= } } impl Eq for XmpString { } impl PartialEq for XmpString { fn eq(&self, other: &XmpString) -> bool { self.to_str() == other.to_str() } } >>>>>>> REPLACE
saltstack/salt
107b97e952d731f8c55c9ca3208ecd2a41512b8d
tests/integration/modules/sysmod.py
python
apache-2.0
Add test to verify loader modules
import integration


class SysModuleTest(integration.ModuleCase):
    '''
    Validate the sys module
    '''
    def test_list_functions(self):
        '''
        sys.list_functions
        '''
        funcs = self.run_function('sys.list_functions')
        self.assertTrue('hosts.list_hosts' in funcs)
        self.assertTrue('pkg.install' in funcs)

    def test_list_modules(self):
        '''
        sys.list_moduels
        '''
        mods = self.run_function('sys.list_modules')
        self.assertTrue('hosts' in mods)
        self.assertTrue('pkg' in mods)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(SysModuleTest)
import integration


class SysModuleTest(integration.ModuleCase):
    '''
    Validate the sys module
    '''
    def test_list_functions(self):
        '''
        sys.list_functions
        '''
        funcs = self.run_function('sys.list_functions')
        self.assertTrue('hosts.list_hosts' in funcs)
        self.assertTrue('pkg.install' in funcs)

    def test_list_modules(self):
        '''
        sys.list_moduels
        '''
        mods = self.run_function('sys.list_modules')
        self.assertTrue('hosts' in mods)
        self.assertTrue('pkg' in mods)

    def test_valid_docs(self):
        '''
        Make sure no functions are exposed that don't have valid docstrings
        '''
        docs = self.run_function('sys.doc')
        bad = set()
        for fun in docs:
            if fun.startswith('runtests_helpers'):
                continue
            if not isinstance(docs[fun], basestring):
                bad.add(fun)
            elif not 'Example::' in docs[fun]:
                if not 'Examples::' in docs[fun]:
                    bad.add(fun)
        if bad:
            import pprint
            pprint.pprint(sorted(bad))
        self.assertFalse(bool(bad))


if __name__ == '__main__':
    from integration import run_tests
    run_tests(SysModuleTest)
19
0
1
add_only
--- a/tests/integration/modules/sysmod.py +++ b/tests/integration/modules/sysmod.py @@ -23,2 +23,21 @@ + def test_valid_docs(self): + ''' + Make sure no functions are exposed that don't have valid docstrings + ''' + docs = self.run_function('sys.doc') + bad = set() + for fun in docs: + if fun.startswith('runtests_helpers'): + continue + if not isinstance(docs[fun], basestring): + bad.add(fun) + elif not 'Example::' in docs[fun]: + if not 'Examples::' in docs[fun]: + bad.add(fun) + if bad: + import pprint + pprint.pprint(sorted(bad)) + self.assertFalse(bool(bad)) +
--- a/tests/integration/modules/sysmod.py +++ b/tests/integration/modules/sysmod.py @@ ... @@ + def test_valid_docs(self): + ''' + Make sure no functions are exposed that don't have valid docstrings + ''' + docs = self.run_function('sys.doc') + bad = set() + for fun in docs: + if fun.startswith('runtests_helpers'): + continue + if not isinstance(docs[fun], basestring): + bad.add(fun) + elif not 'Example::' in docs[fun]: + if not 'Examples::' in docs[fun]: + bad.add(fun) + if bad: + import pprint + pprint.pprint(sorted(bad)) + self.assertFalse(bool(bad)) +
--- a/tests/integration/modules/sysmod.py +++ b/tests/integration/modules/sysmod.py @@ -23,2 +23,21 @@ CON ADD def test_valid_docs(self): ADD ''' ADD Make sure no functions are exposed that don't have valid docstrings ADD ''' ADD docs = self.run_function('sys.doc') ADD bad = set() ADD for fun in docs: ADD if fun.startswith('runtests_helpers'): ADD continue ADD if not isinstance(docs[fun], basestring): ADD bad.add(fun) ADD elif not 'Example::' in docs[fun]: ADD if not 'Examples::' in docs[fun]: ADD bad.add(fun) ADD if bad: ADD import pprint ADD pprint.pprint(sorted(bad)) ADD self.assertFalse(bool(bad)) ADD CON
<<<<<<< SEARCH self.assertTrue('pkg' in mods) if __name__ == '__main__': ======= self.assertTrue('pkg' in mods) def test_valid_docs(self): ''' Make sure no functions are exposed that don't have valid docstrings ''' docs = self.run_function('sys.doc') bad = set() for fun in docs: if fun.startswith('runtests_helpers'): continue if not isinstance(docs[fun], basestring): bad.add(fun) elif not 'Example::' in docs[fun]: if not 'Examples::' in docs[fun]: bad.add(fun) if bad: import pprint pprint.pprint(sorted(bad)) self.assertFalse(bool(bad)) if __name__ == '__main__': >>>>>>> REPLACE
jaywink/diaspora-hub
164a80ce3bcffad0e233426830c712cddd2f750b
thefederation/apps.py
python
agpl-3.0
Increase timeout of clean_duplicate_nodes job
import datetime
import sys

import django_rq
from django.apps import AppConfig


class TheFederationConfig(AppConfig):
    name = "thefederation"
    verbose_name = "The Federation"

    def ready(self):
        # Only register tasks if RQ Scheduler process
        if "rqscheduler" not in sys.argv:
            return

        from thefederation.social import make_daily_post
        from thefederation.tasks import aggregate_daily_stats
        from thefederation.tasks import clean_duplicate_nodes
        from thefederation.tasks import poll_nodes

        scheduler = django_rq.get_scheduler()
        # Delete any existing jobs in the scheduler when the app starts up
        for job in scheduler.get_jobs():
            job.delete()

        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=aggregate_daily_stats,
            interval=5500,
            queue_name='high',
        )
        scheduler.cron(
            '0 10 * * *',
            func=make_daily_post,
            queue_name='high',
        )
        scheduler.cron(
            '18 4 * * *',
            func=clean_duplicate_nodes,
            queue_name='medium',
        )
        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=poll_nodes,
            interval=10800,
            queue_name='medium',
        )
import datetime
import sys

import django_rq
from django.apps import AppConfig


class TheFederationConfig(AppConfig):
    name = "thefederation"
    verbose_name = "The Federation"

    def ready(self):
        # Only register tasks if RQ Scheduler process
        if "rqscheduler" not in sys.argv:
            return

        from thefederation.social import make_daily_post
        from thefederation.tasks import aggregate_daily_stats
        from thefederation.tasks import clean_duplicate_nodes
        from thefederation.tasks import poll_nodes

        scheduler = django_rq.get_scheduler()
        # Delete any existing jobs in the scheduler when the app starts up
        for job in scheduler.get_jobs():
            job.delete()

        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=aggregate_daily_stats,
            interval=5500,
            queue_name='high',
        )
        scheduler.cron(
            '0 10 * * *',
            func=make_daily_post,
            queue_name='high',
        )
        scheduler.cron(
            '18 4 * * *',
            func=clean_duplicate_nodes,
            queue_name='medium',
            timeout=3600,
        )
        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=poll_nodes,
            interval=10800,
            queue_name='medium',
        )
1
0
1
add_only
--- a/thefederation/apps.py
+++ b/thefederation/apps.py
@@ -41,2 +41,3 @@
             queue_name='medium',
+            timeout=3600,
         )
--- a/thefederation/apps.py
+++ b/thefederation/apps.py
@@ ... @@
             queue_name='medium',
+            timeout=3600,
         )
--- a/thefederation/apps.py
+++ b/thefederation/apps.py
@@ -41,2 +41,3 @@
CON             queue_name='medium',
ADD             timeout=3600,
CON         )
<<<<<<< SEARCH
            func=clean_duplicate_nodes,
            queue_name='medium',
        )
        scheduler.schedule(
=======
            func=clean_duplicate_nodes,
            queue_name='medium',
            timeout=3600,
        )
        scheduler.schedule(
>>>>>>> REPLACE
gasman/Willow
f37f972c3ded0671beed16c9e0c6ee2a5e764f5f
setup.py
python
bsd-3-clause
Exclude tests package from distribution
#!/usr/bin/env python

import sys, os


try:
    from setuptools import setup, find_packages
except ImportError:
    from distutils.core import setup


# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
    import multiprocessing
except ImportError:
    pass


setup(
    name='Willow',
    version='0.4a0',
    description='A Python image library that sits on top of Pillow, Wand and OpenCV',
    author='Karl Hobley',
    author_email='[email protected]',
    url='',
    packages=find_packages(),
    include_package_data=True,
    license='BSD',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Topic :: Multimedia :: Graphics',
        'Topic :: Multimedia :: Graphics :: Graphics Conversion',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    install_requires=[],
    zip_safe=False,
)
#!/usr/bin/env python

import sys, os


try:
    from setuptools import setup, find_packages
except ImportError:
    from distutils.core import setup


# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
    import multiprocessing
except ImportError:
    pass


setup(
    name='Willow',
    version='0.4a0',
    description='A Python image library that sits on top of Pillow, Wand and OpenCV',
    author='Karl Hobley',
    author_email='[email protected]',
    url='',
    packages=find_packages(exclude=['tests']),
    include_package_data=True,
    license='BSD',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Topic :: Multimedia :: Graphics',
        'Topic :: Multimedia :: Graphics :: Graphics Conversion',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    install_requires=[],
    zip_safe=False,
)
1
1
1
mixed
--- a/setup.py
+++ b/setup.py
@@ -27,3 +27,3 @@
     url='',
-    packages=find_packages(),
+    packages=find_packages(exclude=['tests']),
     include_package_data=True,
--- a/setup.py
+++ b/setup.py
@@ ... @@
     url='',
-    packages=find_packages(),
+    packages=find_packages(exclude=['tests']),
     include_package_data=True,
--- a/setup.py
+++ b/setup.py
@@ -27,3 +27,3 @@
CON     url='',
DEL     packages=find_packages(),
ADD     packages=find_packages(exclude=['tests']),
CON     include_package_data=True,
<<<<<<< SEARCH
    author_email='[email protected]',
    url='',
    packages=find_packages(),
    include_package_data=True,
    license='BSD',
=======
    author_email='[email protected]',
    url='',
    packages=find_packages(exclude=['tests']),
    include_package_data=True,
    license='BSD',
>>>>>>> REPLACE
bobbo/rustfm-scrobble
31399e2090180de7b58d12d59123d0935bbf28cc
src/dto.rs
rust
mit
Use a custom deserializer to deserialize 'corrected' field of now playing response fields to bool
#[derive(Deserialize, Debug)] pub struct AuthResponse { pub session: SessionResponse } #[derive(Deserialize, Debug, Clone)] pub struct SessionResponse { pub key: String, pub subscriber: i64, pub name: String } #[derive(Deserialize, Debug)] pub struct NowPlayingResponseWrapper { pub nowplaying: NowPlayingResponse } #[derive(Deserialize, Debug)] pub struct NowPlayingResponse { pub artist: CorrectableString, pub album: CorrectableString, #[serde(rename="albumArtist")] pub album_artist: CorrectableString, pub track: CorrectableString } #[derive(Deserialize, Debug)] pub struct CorrectableString { pub corrected: String, #[serde(rename="#text")] pub text: String }
use serde; use serde_json as json; #[derive(Deserialize, Debug)] pub struct AuthResponse { pub session: SessionResponse } #[derive(Deserialize, Debug, Clone)] pub struct SessionResponse { pub key: String, pub subscriber: i64, pub name: String } #[derive(Deserialize, Debug)] pub struct NowPlayingResponseWrapper { pub nowplaying: NowPlayingResponse } #[derive(Deserialize, Debug)] pub struct NowPlayingResponse { pub artist: CorrectableString, pub album: CorrectableString, #[serde(rename="albumArtist")] pub album_artist: CorrectableString, pub track: CorrectableString } #[derive(Deserialize, Debug)] pub struct CorrectableString { #[serde(deserialize_with="CorrectableString::deserialize_corrected_field")] pub corrected: bool, #[serde(rename="#text")] pub text: String } impl CorrectableString { fn deserialize_corrected_field<D>(de: D) -> Result<bool, D::Error> where D: serde::Deserializer { let deser_result: json::Value = try!(serde::Deserialize::deserialize(de)); match deser_result { json::Value::String(ref s) if &*s == "1" => Ok(true), json::Value::String(ref s) if &*s == "0" => Ok(false), _ => Err(serde::de::Error::custom("Unexpected value")), } } }
20
1
3
mixed
--- a/src/dto.rs +++ b/src/dto.rs @@ -1 +1,4 @@ +use serde; +use serde_json as json; + #[derive(Deserialize, Debug)] @@ -28,3 +31,4 @@ pub struct CorrectableString { - pub corrected: String, + #[serde(deserialize_with="CorrectableString::deserialize_corrected_field")] + pub corrected: bool, #[serde(rename="#text")] @@ -32 +36,16 @@ } + +impl CorrectableString { + + fn deserialize_corrected_field<D>(de: D) -> Result<bool, D::Error> + where D: serde::Deserializer + { + let deser_result: json::Value = try!(serde::Deserialize::deserialize(de)); + match deser_result { + json::Value::String(ref s) if &*s == "1" => Ok(true), + json::Value::String(ref s) if &*s == "0" => Ok(false), + _ => Err(serde::de::Error::custom("Unexpected value")), + } + } + +}
--- a/src/dto.rs +++ b/src/dto.rs @@ ... @@ +use serde; +use serde_json as json; + #[derive(Deserialize, Debug)] @@ ... @@ pub struct CorrectableString { - pub corrected: String, + #[serde(deserialize_with="CorrectableString::deserialize_corrected_field")] + pub corrected: bool, #[serde(rename="#text")] @@ ... @@ } + +impl CorrectableString { + + fn deserialize_corrected_field<D>(de: D) -> Result<bool, D::Error> + where D: serde::Deserializer + { + let deser_result: json::Value = try!(serde::Deserialize::deserialize(de)); + match deser_result { + json::Value::String(ref s) if &*s == "1" => Ok(true), + json::Value::String(ref s) if &*s == "0" => Ok(false), + _ => Err(serde::de::Error::custom("Unexpected value")), + } + } + +}
--- a/src/dto.rs +++ b/src/dto.rs @@ -1 +1,4 @@ ADD use serde; ADD use serde_json as json; ADD CON #[derive(Deserialize, Debug)] @@ -28,3 +31,4 @@ CON pub struct CorrectableString { DEL pub corrected: String, ADD #[serde(deserialize_with="CorrectableString::deserialize_corrected_field")] ADD pub corrected: bool, CON #[serde(rename="#text")] @@ -32 +36,16 @@ CON } ADD ADD impl CorrectableString { ADD ADD fn deserialize_corrected_field<D>(de: D) -> Result<bool, D::Error> ADD where D: serde::Deserializer ADD { ADD let deser_result: json::Value = try!(serde::Deserialize::deserialize(de)); ADD match deser_result { ADD json::Value::String(ref s) if &*s == "1" => Ok(true), ADD json::Value::String(ref s) if &*s == "0" => Ok(false), ADD _ => Err(serde::de::Error::custom("Unexpected value")), ADD } ADD } ADD ADD }
<<<<<<< SEARCH #[derive(Deserialize, Debug)] pub struct AuthResponse { ======= use serde; use serde_json as json; #[derive(Deserialize, Debug)] pub struct AuthResponse { >>>>>>> REPLACE <<<<<<< SEARCH #[derive(Deserialize, Debug)] pub struct CorrectableString { pub corrected: String, #[serde(rename="#text")] pub text: String } ======= #[derive(Deserialize, Debug)] pub struct CorrectableString { #[serde(deserialize_with="CorrectableString::deserialize_corrected_field")] pub corrected: bool, #[serde(rename="#text")] pub text: String } impl CorrectableString { fn deserialize_corrected_field<D>(de: D) -> Result<bool, D::Error> where D: serde::Deserializer { let deser_result: json::Value = try!(serde::Deserialize::deserialize(de)); match deser_result { json::Value::String(ref s) if &*s == "1" => Ok(true), json::Value::String(ref s) if &*s == "0" => Ok(false), _ => Err(serde::de::Error::custom("Unexpected value")), } } } >>>>>>> REPLACE
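Editor's note on the rustfm-scrobble change above: for readers unfamiliar with serde's deserialize_with, here is a standalone sketch of the same idea written against current serde 1.x and serde_json (the commit itself predates serde 1.0, hence the lifetime-free Deserializer bound and try!). The struct and function names below are illustrative, not the crate's.

// Sketch, assuming serde = { version = "1", features = ["derive"] } and serde_json = "1".
use serde::de::Error;
use serde::{Deserialize, Deserializer};

// Map Last.fm's "0"/"1" string flags onto a bool.
fn flag_from_string<'de, D>(de: D) -> Result<bool, D::Error>
where
    D: Deserializer<'de>,
{
    match String::deserialize(de)?.as_str() {
        "1" => Ok(true),
        "0" => Ok(false),
        other => Err(D::Error::custom(format!("unexpected flag value: {}", other))),
    }
}

#[derive(Deserialize, Debug)]
struct Correctable {
    #[serde(deserialize_with = "flag_from_string")]
    corrected: bool,
    #[serde(rename = "#text")]
    text: String,
}

fn main() {
    let json = r##"{"corrected": "1", "#text": "Some Artist"}"##;
    let parsed: Correctable = serde_json::from_str(json).unwrap();
    println!("{:?}", parsed); // Correctable { corrected: true, text: "Some Artist" }
}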
ArchimedesPi/shellshocker
5b5975c70327defba016b000616a84db53141ea3
shellshocker_server/static/js/shocker.js
javascript
mit
Stop spinner when done :fistpump:
// When the document is ready $(document).ready(function() { // Grab the Handlebars template from a <script> tag alertTemplateText = $('#alertTemplateText').html(); // Compile the Handlebars template alertTemplate = Handlebars.compile(alertTemplateText); exploits = _.flatten( _.map(urlsToCheck, function (url) { return _.map(headersToCheck, function (header) { return {url: url, header: header}; }); }), true); console.log(exploits); exploits.reduce(function (sequence, exploit) { return sequence.then(function () { console.log(exploit); return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); }).then(function(result) { console.log(result); // Generate the HTML of the template template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); // Append it to the results well $('.results-well').append(template_html).fadeIn(); //console.log(template_html); }).then(function () { // It's all done, hide the spinner ;) //$('.testing-spinner').hide(); }); }, Promise.resolve()); });
// When the document is ready $(document).ready(function() { // Grab the Handlebars template from a <script> tag alertTemplateText = $('#alertTemplateText').html(); // Compile the Handlebars template alertTemplate = Handlebars.compile(alertTemplateText); exploits = _.flatten( _.map(urlsToCheck, function (url) { return _.map(headersToCheck, function (header) { return {url: url, header: header}; }); }), true); //console.log(exploits); Promise.resolve().then(function() { return exploits.reduce(function (sequence, exploit) { return sequence.then(function () { //console.log(exploit); return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); }).then(function(result) { //console.log(result); // Generate the HTML of the template template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); // Append it to the results well $('.results-well').append(template_html).fadeIn(); //console.log(template_html); }); }, Promise.resolve()); }).catch(function(err) { // Catch any error that comes our way alert('It\'s bloody broken! ' + err.message); //console.log(err); }).then(function() { // It's all done, hide the spinner ;) //console.log('all done!'); $('.testing-spinner').hide(); }); });
24
17
1
mixed
--- a/shellshocker_server/static/js/shocker.js +++ b/shellshocker_server/static/js/shocker.js @@ -15,20 +15,27 @@ - console.log(exploits); + //console.log(exploits); - exploits.reduce(function (sequence, exploit) { - return sequence.then(function () { - console.log(exploit); - return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); - }).then(function(result) { - console.log(result); - // Generate the HTML of the template - template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); - // Append it to the results well - $('.results-well').append(template_html).fadeIn(); - //console.log(template_html); - }).then(function () { - // It's all done, hide the spinner ;) - //$('.testing-spinner').hide(); - }); - }, Promise.resolve()); + Promise.resolve().then(function() { + return exploits.reduce(function (sequence, exploit) { + return sequence.then(function () { + //console.log(exploit); + return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); + }).then(function(result) { + //console.log(result); + // Generate the HTML of the template + template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); + // Append it to the results well + $('.results-well').append(template_html).fadeIn(); + //console.log(template_html); + }); + }, Promise.resolve()); + }).catch(function(err) { + // Catch any error that comes our way + alert('It\'s bloody broken! ' + err.message); + //console.log(err); + }).then(function() { + // It's all done, hide the spinner ;) + //console.log('all done!'); + $('.testing-spinner').hide(); + }); });
--- a/shellshocker_server/static/js/shocker.js +++ b/shellshocker_server/static/js/shocker.js @@ ... @@ - console.log(exploits); + //console.log(exploits); - exploits.reduce(function (sequence, exploit) { - return sequence.then(function () { - console.log(exploit); - return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); - }).then(function(result) { - console.log(result); - // Generate the HTML of the template - template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); - // Append it to the results well - $('.results-well').append(template_html).fadeIn(); - //console.log(template_html); - }).then(function () { - // It's all done, hide the spinner ;) - //$('.testing-spinner').hide(); - }); - }, Promise.resolve()); + Promise.resolve().then(function() { + return exploits.reduce(function (sequence, exploit) { + return sequence.then(function () { + //console.log(exploit); + return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); + }).then(function(result) { + //console.log(result); + // Generate the HTML of the template + template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); + // Append it to the results well + $('.results-well').append(template_html).fadeIn(); + //console.log(template_html); + }); + }, Promise.resolve()); + }).catch(function(err) { + // Catch any error that comes our way + alert('It\'s bloody broken! ' + err.message); + //console.log(err); + }).then(function() { + // It's all done, hide the spinner ;) + //console.log('all done!'); + $('.testing-spinner').hide(); + }); });
--- a/shellshocker_server/static/js/shocker.js +++ b/shellshocker_server/static/js/shocker.js @@ -15,20 +15,27 @@ CON DEL console.log(exploits); ADD //console.log(exploits); CON DEL exploits.reduce(function (sequence, exploit) { DEL return sequence.then(function () { DEL console.log(exploit); DEL return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); DEL }).then(function(result) { DEL console.log(result); DEL // Generate the HTML of the template DEL template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); DEL // Append it to the results well DEL $('.results-well').append(template_html).fadeIn(); DEL //console.log(template_html); DEL }).then(function () { DEL // It's all done, hide the spinner ;) DEL //$('.testing-spinner').hide(); DEL }); DEL }, Promise.resolve()); ADD Promise.resolve().then(function() { ADD return exploits.reduce(function (sequence, exploit) { ADD return sequence.then(function () { ADD //console.log(exploit); ADD return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); ADD }).then(function(result) { ADD //console.log(result); ADD // Generate the HTML of the template ADD template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); ADD // Append it to the results well ADD $('.results-well').append(template_html).fadeIn(); ADD //console.log(template_html); ADD }); ADD }, Promise.resolve()); ADD }).catch(function(err) { ADD // Catch any error that comes our way ADD alert('It\'s bloody broken! ' + err.message); ADD //console.log(err); ADD }).then(function() { ADD // It's all done, hide the spinner ;) ADD //console.log('all done!'); ADD $('.testing-spinner').hide(); ADD }); CON });
<<<<<<< SEARCH true); console.log(exploits); exploits.reduce(function (sequence, exploit) { return sequence.then(function () { console.log(exploit); return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); }).then(function(result) { console.log(result); // Generate the HTML of the template template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); // Append it to the results well $('.results-well').append(template_html).fadeIn(); //console.log(template_html); }).then(function () { // It's all done, hide the spinner ;) //$('.testing-spinner').hide(); }); }, Promise.resolve()); }); ======= true); //console.log(exploits); Promise.resolve().then(function() { return exploits.reduce(function (sequence, exploit) { return sequence.then(function () { //console.log(exploit); return post(exploitableCheckURL, {websiteUrl: exploit.url, header: exploit.header}); }).then(function(result) { //console.log(result); // Generate the HTML of the template template_html = alertTemplate({exploitable: result.response.exploitable, url: result.dat.websiteUrl, header: result.dat.header}); // Append it to the results well $('.results-well').append(template_html).fadeIn(); //console.log(template_html); }); }, Promise.resolve()); }).catch(function(err) { // Catch any error that comes our way alert('It\'s bloody broken! ' + err.message); //console.log(err); }).then(function() { // It's all done, hide the spinner ;) //console.log('all done!'); $('.testing-spinner').hide(); }); }); >>>>>>> REPLACE
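Editor's note on the shellshocker change above: the fix wraps the reduce-based request chain so a single trailing .catch().then() runs after everything, which is what lets the spinner be hidden even when a check fails. A stripped-down sketch of that pattern without jQuery or the real endpoint (runnable as-is in Node or a browser console); 'check' is a stand-in for the real POST:

// Sketch only: simulate a sequence of checks, one of which fails.
function check(item) {
  return Promise.resolve().then(function () {
    if (item === 'bad') { throw new Error('check failed for ' + item); }
    console.log('checked', item);
  });
}

var items = ['a', 'b', 'bad', 'c'];

Promise.resolve().then(function () {
  // Run the checks strictly one after another.
  return items.reduce(function (sequence, item) {
    return sequence.then(function () { return check(item); });
  }, Promise.resolve());
}).catch(function (err) {
  console.log('caught:', err.message);
}).then(function () {
  console.log('spinner hidden'); // reached whether or not a check failed
});

Note that, as in the real code, an error short-circuits the remaining checks; only the final cleanup step is guaranteed to run.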
Audiveris/audiveris
02538756680805e08950e46ddf44c667853f622a
src/main/omr/sheet/PixelPoint.java
java
agpl-3.0
Use of a more specific constructor, to avoid too easy conversions
//-----------------------------------------------------------------------// // // // P i x e l P o i n t // // // // Copyright (C) Herve Bitteur 2000-2006. All rights reserved. // // This software is released under the terms of the GNU General Public // // License. Please contact the author at [email protected] // // to report bugs & suggestions. // //-----------------------------------------------------------------------// package omr.sheet; import java.awt.*; /** * Class <code>PixelPoint</code> is a simple Point that is meant to * represent a point in a deskewed page, with its coordinates specified in * pixels, so the name. * * <p> This specialization is used to take benefit of compiler checks, to * prevent the use of points with incorrect meaning or units. </p> * * @author Herv&eacute; Bitteur * @version $Id$ */ public class PixelPoint extends Point { //------------// // PixelPoint // //------------// public PixelPoint () { } //------------// // PixelPoint // //------------// public PixelPoint (Point point) { super(point); } }
//-----------------------------------------------------------------------// // // // P i x e l P o i n t // // // // Copyright (C) Herve Bitteur 2000-2006. All rights reserved. // // This software is released under the terms of the GNU General Public // // License. Please contact the author at [email protected] // // to report bugs & suggestions. // //-----------------------------------------------------------------------// package omr.sheet; import java.awt.*; /** * Class <code>PixelPoint</code> is a simple Point that is meant to * represent a point in a deskewed page, with its coordinates specified in * pixels, so the name. * * <p> This specialization is used to take benefit of compiler checks, to * prevent the use of points with incorrect meaning or units. </p> * * @author Herv&eacute; Bitteur * @version $Id$ */ public class PixelPoint extends Point { //------------// // PixelPoint // //------------// public PixelPoint () { } //------------// // PixelPoint // //------------// public PixelPoint (int x, int y) { super(x, y); } }
3
2
1
mixed
--- a/src/main/omr/sheet/PixelPoint.java +++ b/src/main/omr/sheet/PixelPoint.java @@ -38,5 +38,6 @@ //------------// - public PixelPoint (Point point) + public PixelPoint (int x, + int y) { - super(point); + super(x, y); }
--- a/src/main/omr/sheet/PixelPoint.java +++ b/src/main/omr/sheet/PixelPoint.java @@ ... @@ //------------// - public PixelPoint (Point point) + public PixelPoint (int x, + int y) { - super(point); + super(x, y); }
--- a/src/main/omr/sheet/PixelPoint.java +++ b/src/main/omr/sheet/PixelPoint.java @@ -38,5 +38,6 @@ CON //------------// DEL public PixelPoint (Point point) ADD public PixelPoint (int x, ADD int y) CON { DEL super(point); ADD super(x, y); CON }
<<<<<<< SEARCH // PixelPoint // //------------// public PixelPoint (Point point) { super(point); } } ======= // PixelPoint // //------------// public PixelPoint (int x, int y) { super(x, y); } } >>>>>>> REPLACE
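Editor's note on the Audiveris change above: the point of dropping the Point-copy constructor is that any java.awt.Point, whatever unit its coordinates were in, could previously become a PixelPoint without a second glance. A small hypothetical sketch of the difference (class trimmed down, names illustrative, not the project's real call sites):

import java.awt.Point;

// Trimmed-down stand-in for omr.sheet.PixelPoint.
class PixelPoint extends Point {
    PixelPoint() {}
    PixelPoint(int x, int y) { super(x, y); }
}

public class Demo {
    public static void main(String[] args) {
        Point unitsPoint = new Point(3, 4); // coordinates in some non-pixel unit
        // Old API: new PixelPoint(unitsPoint) compiled silently, hiding the unit mix-up.
        // New API: the caller must convert coordinate by coordinate, on purpose:
        PixelPoint p = new PixelPoint(unitsPoint.x, unitsPoint.y);
        System.out.println(p);
    }
}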
HPI-Hackathon/find-my-car
4632a726ac1f7fc4b2eaed5059a094eb606593cc
javascripts/service/car_ads_service.js
javascript
mit
Add params for location based search
define(["underscore", "jquery"], function(_, $) { return function (averageCartype) { var public = {}; var private = {}; private.createObject = function (averageCartype) { var queryObject = { c: [], sc: null, p: null, ecol: [] }; for (var carclass in averageCartype.carclass) queryObject.c.push(carclass); for (var color in averageCartype.colors) queryObject.ecol.push(color); var seatsRange = averageCartype.min_seats + ":" + averageCartype.max_seats; queryObject.sc = seatsRange; var priceRange = averageCartype.min_price + ":" + averageCartype.max_price; queryObject.p = priceRange; return queryObject; }; private.createURL = function (queryObject) { var url = "http://m.mobile.de/svc/s/?"; var params = $.param(queryObject); url = url.concat(params); return url; }; public.getResults = function (averageCartype) { var queryObject = private.createObject(averageCartype); var url = private.createURL(queryObject); var results = $.getJSON(url, function(json) { return json; }); return results; }; return public; }; });
define(["underscore", "jquery", "app"], function(_, $, app) { return function (averageCartype) { var public = {}; var private = {}; private.createObject = function (averageCartype) { var queryObject = { c: [], ecol: [], sc: null, p: null, ll: null }; // Cartypes for (var carclass in averageCartype.carclass) queryObject.c.push(carclass); // Colors for (var color in averageCartype.colors) queryObject.ecol.push(color); // Seats var seatsRange = averageCartype.min_seats + ":" + averageCartype.max_seats; queryObject.sc = seatsRange; // Price var priceRange = averageCartype.min_price + ":" + averageCartype.max_price; queryObject.p = priceRange; // Location queryObject.ll = app.userProfile.point; return queryObject; }; private.createURL = function (queryObject) { var url = "http://m.mobile.de/svc/s/?"; var params = $.param(queryObject); url = url.concat(params); return url; }; public.getResults = function (averageCartype) { var queryObject = private.createObject(averageCartype); var url = private.createURL(queryObject); var results = $.getJSON(url, function(json) { return json; }); return results; }; return public; }; });
10
2
5
mixed
--- a/javascripts/service/car_ads_service.js +++ b/javascripts/service/car_ads_service.js @@ -1,2 +1,2 @@ -define(["underscore", "jquery"], function(_, $) { +define(["underscore", "jquery", "app"], function(_, $, app) { return function (averageCartype) { @@ -9,7 +9,9 @@ c: [], + ecol: [], sc: null, p: null, - ecol: [] + ll: null }; + // Cartypes for (var carclass in averageCartype.carclass) @@ -17,2 +19,3 @@ + // Colors for (var color in averageCartype.colors) @@ -20,2 +23,3 @@ + // Seats var seatsRange = averageCartype.min_seats + ":" + averageCartype.max_seats; @@ -23,4 +27,8 @@ + // Price var priceRange = averageCartype.min_price + ":" + averageCartype.max_price; queryObject.p = priceRange; + + // Location + queryObject.ll = app.userProfile.point;
--- a/javascripts/service/car_ads_service.js +++ b/javascripts/service/car_ads_service.js @@ ... @@ -define(["underscore", "jquery"], function(_, $) { +define(["underscore", "jquery", "app"], function(_, $, app) { return function (averageCartype) { @@ ... @@ c: [], + ecol: [], sc: null, p: null, - ecol: [] + ll: null }; + // Cartypes for (var carclass in averageCartype.carclass) @@ ... @@ + // Colors for (var color in averageCartype.colors) @@ ... @@ + // Seats var seatsRange = averageCartype.min_seats + ":" + averageCartype.max_seats; @@ ... @@ + // Price var priceRange = averageCartype.min_price + ":" + averageCartype.max_price; queryObject.p = priceRange; + + // Location + queryObject.ll = app.userProfile.point;
--- a/javascripts/service/car_ads_service.js +++ b/javascripts/service/car_ads_service.js @@ -1,2 +1,2 @@ DEL define(["underscore", "jquery"], function(_, $) { ADD define(["underscore", "jquery", "app"], function(_, $, app) { CON return function (averageCartype) { @@ -9,7 +9,9 @@ CON c: [], ADD ecol: [], CON sc: null, CON p: null, DEL ecol: [] ADD ll: null CON }; CON ADD // Cartypes CON for (var carclass in averageCartype.carclass) @@ -17,2 +19,3 @@ CON ADD // Colors CON for (var color in averageCartype.colors) @@ -20,2 +23,3 @@ CON ADD // Seats CON var seatsRange = averageCartype.min_seats + ":" + averageCartype.max_seats; @@ -23,4 +27,8 @@ CON ADD // Price CON var priceRange = averageCartype.min_price + ":" + averageCartype.max_price; CON queryObject.p = priceRange; ADD ADD // Location ADD queryObject.ll = app.userProfile.point; CON
<<<<<<< SEARCH define(["underscore", "jquery"], function(_, $) { return function (averageCartype) { ======= define(["underscore", "jquery", "app"], function(_, $, app) { return function (averageCartype) { >>>>>>> REPLACE <<<<<<< SEARCH var queryObject = { c: [], sc: null, p: null, ecol: [] }; for (var carclass in averageCartype.carclass) queryObject.c.push(carclass); for (var color in averageCartype.colors) queryObject.ecol.push(color); var seatsRange = averageCartype.min_seats + ":" + averageCartype.max_seats; queryObject.sc = seatsRange; var priceRange = averageCartype.min_price + ":" + averageCartype.max_price; queryObject.p = priceRange; return queryObject; ======= var queryObject = { c: [], ecol: [], sc: null, p: null, ll: null }; // Cartypes for (var carclass in averageCartype.carclass) queryObject.c.push(carclass); // Colors for (var color in averageCartype.colors) queryObject.ecol.push(color); // Seats var seatsRange = averageCartype.min_seats + ":" + averageCartype.max_seats; queryObject.sc = seatsRange; // Price var priceRange = averageCartype.min_price + ":" + averageCartype.max_price; queryObject.p = priceRange; // Location queryObject.ll = app.userProfile.point; return queryObject; >>>>>>> REPLACE
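Editor's note on the find-my-car change above: since the final URL is just $.param(queryObject) appended to the endpoint, it can help to see what the serialized form looks like. A rough sketch, assuming jQuery is loaded (e.g. in a browser console); every value below is made up, including the lat/long that would normally come from app.userProfile.point:

var queryObject = {
  c: ['Limousine', 'EstateCar'],  // hypothetical car classes
  ecol: ['BLACK'],                // hypothetical exterior colours
  sc: '2:5',                      // seat range
  p: '5000:15000',                // price range
  ll: '52.52,13.40'               // hypothetical lat/long
};
var url = 'http://m.mobile.de/svc/s/?' + $.param(queryObject);
console.log(url);
// With jQuery's default (non-traditional) serialization this comes out roughly as:
// ...?c%5B%5D=Limousine&c%5B%5D=EstateCar&ecol%5B%5D=BLACK&sc=2%3A5&p=5000%3A15000&ll=52.52%2C13.40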
indigo-dc/orchestrator
8af3fb06a577701885945577071c1470190a44d9
src/main/java/it/reply/orchestrator/Application.java
java
apache-2.0
Remove unused code left from JBPM -> Flowable migration
/* * Copyright © 2015-2018 Santer Reply S.p.A. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.reply.orchestrator; import bitronix.tm.jndi.BitronixInitialContextFactory; import java.util.TimeZone; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.web.ErrorMvcAutoConfiguration; @SpringBootApplication(exclude = ErrorMvcAutoConfiguration.class) public class Application { public static final Class<Application> applicationClass = Application.class; static { // JBPM needs a JNDI context from which retrieve the UT and TSR System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY, BitronixInitialContextFactory.class.getName()); } public static void main(String[] args) { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); SpringApplication.run(applicationClass, args); } }
/* * Copyright © 2015-2018 Santer Reply S.p.A. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.reply.orchestrator; import java.util.TimeZone; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.web.ErrorMvcAutoConfiguration; @SpringBootApplication(exclude = ErrorMvcAutoConfiguration.class) public class Application { public static void main(String[] args) { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); SpringApplication.run(Application.class, args); } }
1
11
2
mixed
--- a/src/main/java/it/reply/orchestrator/Application.java +++ b/src/main/java/it/reply/orchestrator/Application.java @@ -18,4 +18,2 @@ -import bitronix.tm.jndi.BitronixInitialContextFactory; - import java.util.TimeZone; @@ -29,13 +27,5 @@ - public static final Class<Application> applicationClass = Application.class; - - static { - // JBPM needs a JNDI context from which retrieve the UT and TSR - System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY, - BitronixInitialContextFactory.class.getName()); - } - public static void main(String[] args) { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - SpringApplication.run(applicationClass, args); + SpringApplication.run(Application.class, args); }
--- a/src/main/java/it/reply/orchestrator/Application.java +++ b/src/main/java/it/reply/orchestrator/Application.java @@ ... @@ -import bitronix.tm.jndi.BitronixInitialContextFactory; - import java.util.TimeZone; @@ ... @@ - public static final Class<Application> applicationClass = Application.class; - - static { - // JBPM needs a JNDI context from which retrieve the UT and TSR - System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY, - BitronixInitialContextFactory.class.getName()); - } - public static void main(String[] args) { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - SpringApplication.run(applicationClass, args); + SpringApplication.run(Application.class, args); }
--- a/src/main/java/it/reply/orchestrator/Application.java +++ b/src/main/java/it/reply/orchestrator/Application.java @@ -18,4 +18,2 @@ CON DEL import bitronix.tm.jndi.BitronixInitialContextFactory; DEL CON import java.util.TimeZone; @@ -29,13 +27,5 @@ CON DEL public static final Class<Application> applicationClass = Application.class; DEL DEL static { DEL // JBPM needs a JNDI context from which retrieve the UT and TSR DEL System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY, DEL BitronixInitialContextFactory.class.getName()); DEL } DEL CON public static void main(String[] args) { CON TimeZone.setDefault(TimeZone.getTimeZone("UTC")); DEL SpringApplication.run(applicationClass, args); ADD SpringApplication.run(Application.class, args); CON }
<<<<<<< SEARCH package it.reply.orchestrator; import bitronix.tm.jndi.BitronixInitialContextFactory; import java.util.TimeZone; ======= package it.reply.orchestrator; import java.util.TimeZone; >>>>>>> REPLACE <<<<<<< SEARCH public class Application { public static final Class<Application> applicationClass = Application.class; static { // JBPM needs a JNDI context from which retrieve the UT and TSR System.setProperty(javax.naming.Context.INITIAL_CONTEXT_FACTORY, BitronixInitialContextFactory.class.getName()); } public static void main(String[] args) { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); SpringApplication.run(applicationClass, args); } ======= public class Application { public static void main(String[] args) { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); SpringApplication.run(Application.class, args); } >>>>>>> REPLACE
metidia/waterline
0b15a5e070ddf0ab4f2a1cbda473facbe6b0dc69
test/unit/model/association.setters.js
javascript
mit
Add unit tests on .add() and .remove()
var assert = require('assert'), manyToManyFixture = require('../../support/fixtures/model/context.manyToMany.fixture'), Model = require('../../../lib/waterline/model'); describe('instance methods', function() { describe('association setters', function() { ///////////////////////////////////////////////////// // TEST SETUP //////////////////////////////////////////////////// var model; before(function() { model = new Model(manyToManyFixture(), {}); }); ///////////////////////////////////////////////////// // TEST METHODS //////////////////////////////////////////////////// it('should allow new associations to be added using the add function', function() { var person = new model({ name: 'foobar' }); person.bars.add(1); assert(person.associations.bars.addModels.length === 1); }); it('should allow new associations to be removed using the remove function', function() { var person = new model({ name: 'foobar' }); person.bars.remove(1); assert(person.associations.bars.removeModels.length === 1); }); }); });
var assert = require('assert'), manyToManyFixture = require('../../support/fixtures/model/context.manyToMany.fixture'), Model = require('../../../lib/waterline/model'); describe('instance methods', function() { describe('association setters', function() { ///////////////////////////////////////////////////// // TEST SETUP //////////////////////////////////////////////////// var model; before(function() { model = new Model(manyToManyFixture(), {}); }); ///////////////////////////////////////////////////// // TEST METHODS //////////////////////////////////////////////////// it('should allow new associations to be added using the add function', function() { var person = new model({ name: 'foobar' }); person.bars.add(1); assert(person.associations.bars.addModels.length === 1); }); it('should allow new associations to be added using the add function and an array', function() { var person = new model({ name: 'foobar' }); person.bars.add( [ 1, 2, 3 ] ); assert(person.associations.bars.addModels.length === 3); }); it('should allow new associations to be removed using the remove function', function() { var person = new model({ name: 'foobar' }); person.bars.remove(1); assert(person.associations.bars.removeModels.length === 1); }); it('should allow new associations to be removed using the remove function and an array', function() { var person = new model({ name: 'foobar' }); person.bars.remove( [ 1, 2, 3 ] ); assert(person.associations.bars.removeModels.length === 3); }); }); });
14
0
2
add_only
--- a/test/unit/model/association.setters.js +++ b/test/unit/model/association.setters.js @@ -29,2 +29,10 @@ + it('should allow new associations to be added using the add function and an array', function() { + var person = new model({ name: 'foobar' }); + + person.bars.add( [ 1, 2, 3 ] ); + assert(person.associations.bars.addModels.length === 3); + }); + + it('should allow new associations to be removed using the remove function', function() { @@ -36,2 +44,8 @@ + it('should allow new associations to be removed using the remove function and an array', function() { + var person = new model({ name: 'foobar' }); + + person.bars.remove( [ 1, 2, 3 ] ); + assert(person.associations.bars.removeModels.length === 3); + }); });
--- a/test/unit/model/association.setters.js +++ b/test/unit/model/association.setters.js @@ ... @@ + it('should allow new associations to be added using the add function and an array', function() { + var person = new model({ name: 'foobar' }); + + person.bars.add( [ 1, 2, 3 ] ); + assert(person.associations.bars.addModels.length === 3); + }); + + it('should allow new associations to be removed using the remove function', function() { @@ ... @@ + it('should allow new associations to be removed using the remove function and an array', function() { + var person = new model({ name: 'foobar' }); + + person.bars.remove( [ 1, 2, 3 ] ); + assert(person.associations.bars.removeModels.length === 3); + }); });
--- a/test/unit/model/association.setters.js +++ b/test/unit/model/association.setters.js @@ -29,2 +29,10 @@ CON ADD it('should allow new associations to be added using the add function and an array', function() { ADD var person = new model({ name: 'foobar' }); ADD ADD person.bars.add( [ 1, 2, 3 ] ); ADD assert(person.associations.bars.addModels.length === 3); ADD }); ADD ADD CON it('should allow new associations to be removed using the remove function', function() { @@ -36,2 +44,8 @@ CON ADD it('should allow new associations to be removed using the remove function and an array', function() { ADD var person = new model({ name: 'foobar' }); ADD ADD person.bars.remove( [ 1, 2, 3 ] ); ADD assert(person.associations.bars.removeModels.length === 3); ADD }); CON });
<<<<<<< SEARCH }); it('should allow new associations to be removed using the remove function', function() { var person = new model({ name: 'foobar' }); ======= }); it('should allow new associations to be added using the add function and an array', function() { var person = new model({ name: 'foobar' }); person.bars.add( [ 1, 2, 3 ] ); assert(person.associations.bars.addModels.length === 3); }); it('should allow new associations to be removed using the remove function', function() { var person = new model({ name: 'foobar' }); >>>>>>> REPLACE <<<<<<< SEARCH }); }); }); ======= }); it('should allow new associations to be removed using the remove function and an array', function() { var person = new model({ name: 'foobar' }); person.bars.remove( [ 1, 2, 3 ] ); assert(person.associations.bars.removeModels.length === 3); }); }); }); >>>>>>> REPLACE
ktorio/ktor
cecfee0ac22db51534bc7cbeb75097c731cba7f9
ktor-features/ktor-components/src/org/jetbrains/ktor/components/ComponentApplication.kt
kotlin
apache-2.0
Use routing as a feature
package org.jetbrains.ktor.components import org.jetbrains.ktor.application.* import org.jetbrains.ktor.routing.* import kotlin.system.* public open class ComponentApplication(config: ApplicationConfig) : Application(config) { val container = StorageComponentContainer("Application") val routing = Routing() val log = config.log.fork("Components") init { val componentClassLoader = Thread.currentThread().contextClassLoader container.registerInstance(this) container.registerInstance(config) // TODO: instead of registering log itself, register component resolver, that can fork log for each component container.registerInstance(config.log) container.registerInstance(componentClassLoader) container.registerInstance(routing) val introspectionTime = measureTimeMillis { componentClassLoader .scanForClasses("") .filter { it.getAnnotation(Component::class.java) != null } .forEach { container.registerSingleton(it) } } log.info("Introspection took $introspectionTime ms") val compositionTime = measureTimeMillis { container.compose() } log.info("Composition took $compositionTime ms") routing.installInto(this) } override fun dispose() { super.dispose() container.close() } fun routing(body: RoutingEntry.() -> Unit) = routing.apply(body) } @Retention(AnnotationRetention.RUNTIME) annotation public class Component
package org.jetbrains.ktor.components import org.jetbrains.ktor.application.* import org.jetbrains.ktor.features.* import org.jetbrains.ktor.routing.* import kotlin.system.* public open class ComponentApplication(config: ApplicationConfig) : Application(config) { val container = StorageComponentContainer("Application") val routing = install(Routing) val log = config.log.fork("Components") init { val componentClassLoader = Thread.currentThread().contextClassLoader container.registerInstance(this) container.registerInstance(config) // TODO: instead of registering log itself, register component resolver, that can fork log for each component container.registerInstance(config.log) container.registerInstance(componentClassLoader) container.registerInstance(routing) val introspectionTime = measureTimeMillis { componentClassLoader .scanForClasses("") .filter { it.getAnnotation(Component::class.java) != null } .forEach { container.registerSingleton(it) } } log.info("Introspection took $introspectionTime ms") val compositionTime = measureTimeMillis { container.compose() } log.info("Composition took $compositionTime ms") } override fun dispose() { super.dispose() container.close() } fun routing(body: RoutingEntry.() -> Unit) = routing.apply(body) } @Retention(AnnotationRetention.RUNTIME) annotation public class Component
2
3
3
mixed
--- a/ktor-features/ktor-components/src/org/jetbrains/ktor/components/ComponentApplication.kt +++ b/ktor-features/ktor-components/src/org/jetbrains/ktor/components/ComponentApplication.kt @@ -3,2 +3,3 @@ import org.jetbrains.ktor.application.* +import org.jetbrains.ktor.features.* import org.jetbrains.ktor.routing.* @@ -8,3 +9,3 @@ val container = StorageComponentContainer("Application") - val routing = Routing() + val routing = install(Routing) val log = config.log.fork("Components") @@ -33,4 +34,2 @@ log.info("Composition took $compositionTime ms") - - routing.installInto(this) }
--- a/ktor-features/ktor-components/src/org/jetbrains/ktor/components/ComponentApplication.kt +++ b/ktor-features/ktor-components/src/org/jetbrains/ktor/components/ComponentApplication.kt @@ ... @@ import org.jetbrains.ktor.application.* +import org.jetbrains.ktor.features.* import org.jetbrains.ktor.routing.* @@ ... @@ val container = StorageComponentContainer("Application") - val routing = Routing() + val routing = install(Routing) val log = config.log.fork("Components") @@ ... @@ log.info("Composition took $compositionTime ms") - - routing.installInto(this) }
--- a/ktor-features/ktor-components/src/org/jetbrains/ktor/components/ComponentApplication.kt +++ b/ktor-features/ktor-components/src/org/jetbrains/ktor/components/ComponentApplication.kt @@ -3,2 +3,3 @@ CON import org.jetbrains.ktor.application.* ADD import org.jetbrains.ktor.features.* CON import org.jetbrains.ktor.routing.* @@ -8,3 +9,3 @@ CON val container = StorageComponentContainer("Application") DEL val routing = Routing() ADD val routing = install(Routing) CON val log = config.log.fork("Components") @@ -33,4 +34,2 @@ CON log.info("Composition took $compositionTime ms") DEL DEL routing.installInto(this) CON }
<<<<<<< SEARCH import org.jetbrains.ktor.application.* import org.jetbrains.ktor.routing.* import kotlin.system.* public open class ComponentApplication(config: ApplicationConfig) : Application(config) { val container = StorageComponentContainer("Application") val routing = Routing() val log = config.log.fork("Components") ======= import org.jetbrains.ktor.application.* import org.jetbrains.ktor.features.* import org.jetbrains.ktor.routing.* import kotlin.system.* public open class ComponentApplication(config: ApplicationConfig) : Application(config) { val container = StorageComponentContainer("Application") val routing = install(Routing) val log = config.log.fork("Components") >>>>>>> REPLACE <<<<<<< SEARCH } log.info("Composition took $compositionTime ms") routing.installInto(this) } ======= } log.info("Composition took $compositionTime ms") } >>>>>>> REPLACE
nickgonzales/hyper
00e46d7cdaf0e9ac17f1129521e9c780db1e00fe
examples/client.rs
rust
mit
chore(examples): Stop using deprecated std::os::args in example As per this compiler warning "warning: use of deprecated item: use std::env::args() instead, #[warn(deprecated)] on by default"
#![feature(os, io)] extern crate hyper; use std::os; use std::old_io::stdout; use std::old_io::util::copy; use hyper::Client; fn main() { let args = os::args(); match args.len() { 2 => (), _ => { println!("Usage: client <url>"); return; } }; let url = &*args[1]; let mut client = Client::new(); let mut res = match client.get(url).send() { Ok(res) => res, Err(err) => panic!("Failed to connect: {:?}", err) }; println!("Response: {}", res.status); println!("Headers:\n{}", res.headers); match copy(&mut res, &mut stdout()) { Ok(..) => (), Err(e) => panic!("Stream failure: {:?}", e) }; }
#![feature(env, os, io)] extern crate hyper; use std::env; use std::old_io::stdout; use std::old_io::util::copy; use hyper::Client; fn main() { let url = match env::args().nth(1) { Some(url) => url, None => { println!("Usage: client <url>"); return; } }; let url = match url.to_str() { Some(url) => url, None => { println!("Url contains invalid unicode"); return; } }; let mut client = Client::new(); let mut res = match client.get(url).send() { Ok(res) => res, Err(err) => panic!("Failed to connect: {:?}", err) }; println!("Response: {}", res.status); println!("Headers:\n{}", res.headers); match copy(&mut res, &mut stdout()) { Ok(..) => (), Err(e) => panic!("Stream failure: {:?}", e) }; }
12
7
3
mixed
--- a/examples/client.rs +++ b/examples/client.rs @@ -1,5 +1,5 @@ -#![feature(os, io)] +#![feature(env, os, io)] extern crate hyper; -use std::os; +use std::env; use std::old_io::stdout; @@ -10,6 +10,5 @@ fn main() { - let args = os::args(); - match args.len() { - 2 => (), - _ => { + let url = match env::args().nth(1) { + Some(url) => url, + None => { println!("Usage: client <url>"); @@ -19,3 +18,9 @@ - let url = &*args[1]; + let url = match url.to_str() { + Some(url) => url, + None => { + println!("Url contains invalid unicode"); + return; + } + };
--- a/examples/client.rs +++ b/examples/client.rs @@ ... @@ -#![feature(os, io)] +#![feature(env, os, io)] extern crate hyper; -use std::os; +use std::env; use std::old_io::stdout; @@ ... @@ fn main() { - let args = os::args(); - match args.len() { - 2 => (), - _ => { + let url = match env::args().nth(1) { + Some(url) => url, + None => { println!("Usage: client <url>"); @@ ... @@ - let url = &*args[1]; + let url = match url.to_str() { + Some(url) => url, + None => { + println!("Url contains invalid unicode"); + return; + } + };
--- a/examples/client.rs +++ b/examples/client.rs @@ -1,5 +1,5 @@ DEL #![feature(os, io)] ADD #![feature(env, os, io)] CON extern crate hyper; CON DEL use std::os; ADD use std::env; CON use std::old_io::stdout; @@ -10,6 +10,5 @@ CON fn main() { DEL let args = os::args(); DEL match args.len() { DEL 2 => (), DEL _ => { ADD let url = match env::args().nth(1) { ADD Some(url) => url, ADD None => { CON println!("Usage: client <url>"); @@ -19,3 +18,9 @@ CON DEL let url = &*args[1]; ADD let url = match url.to_str() { ADD Some(url) => url, ADD None => { ADD println!("Url contains invalid unicode"); ADD return; ADD } ADD }; CON
<<<<<<< SEARCH #![feature(os, io)] extern crate hyper; use std::os; use std::old_io::stdout; use std::old_io::util::copy; ======= #![feature(env, os, io)] extern crate hyper; use std::env; use std::old_io::stdout; use std::old_io::util::copy; >>>>>>> REPLACE <<<<<<< SEARCH fn main() { let args = os::args(); match args.len() { 2 => (), _ => { println!("Usage: client <url>"); return; } }; let url = &*args[1]; let mut client = Client::new(); ======= fn main() { let url = match env::args().nth(1) { Some(url) => url, None => { println!("Usage: client <url>"); return; } }; let url = match url.to_str() { Some(url) => url, None => { println!("Url contains invalid unicode"); return; } }; let mut client = Client::new(); >>>>>>> REPLACE
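Editor's note on the hyper example change above: on a current Rust toolchain the same argument handling is shorter, because std::env::args() now yields String items directly (and panics on invalid unicode) instead of values needing the explicit to_str() check seen in the commit. A minimal sketch of just the argument parsing; the actual HTTP request is omitted here:

use std::env;

fn main() {
    let url = match env::args().nth(1) {
        Some(url) => url, // already a String on a modern toolchain
        None => {
            println!("Usage: client <url>");
            return;
        }
    };
    println!("would fetch {}", url);
}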