Columns:
repo: string, length 8–50
commit: string, length 40
path: string, length 5–171
lang: string, 5 classes
license: string, 13 classes
message: string, length 21–1.33k
old_code: string, length 15–2.4k
new_code: string, length 140–2.61k
n_added: int64, 0–81
n_removed: int64, 0–58
n_hunks: int64, 1–8
change_kind: string, 3 classes
udiff: string, length 88–3.33k
udiff-h: string, length 85–3.32k
udiff-l: string, length 95–3.57k
search-replace: string, length 89–3.36k
repo: agoda-com/Kakao
commit: c31ae715cbba04b6d8a366b53cd101c0bad92159
path: sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
lang: kotlin
license: apache-2.0
message: Add test for picker dialogs
old_code:
```kotlin
package com.agoda.sample
```
new_code:
```kotlin
package com.agoda.sample

import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner
import androidx.test.rule.ActivityTestRule
import com.agoda.kakao.screen.Screen
import com.agoda.sample.screen.PickersActivityScreen
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith

@RunWith(AndroidJUnit4ClassRunner::class)
class PickersTest {
    @Rule
    @JvmField
    val rule = ActivityTestRule(PickersActivity::class.java)

    @Test
    fun testTextInputLayout() {
        Screen.onScreen<PickersActivityScreen> {
            selectTimeButton {
                click()
            }

            timePicker {
                setTime(22, 4)
            }

            timeText {
                hasText("22:4")
            }

            selectDateButton {
                click()
            }

            datePicker {
                setDate(1955, 11, 12)
            }

            dateText {
                hasText("12 11 1955")
            }
        }
    }
}
```
n_added: 43, n_removed: 0, n_hunks: 1, change_kind: add_only
udiff, udiff-h, udiff-l, search-replace:
--- a/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt +++ b/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt @@ -2 +2,44 @@ +import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner +import androidx.test.rule.ActivityTestRule +import com.agoda.kakao.screen.Screen +import com.agoda.sample.screen.PickersActivityScreen +import org.junit.Rule +import org.junit.Test +import org.junit.runner.RunWith + +@RunWith(AndroidJUnit4ClassRunner::class) +class PickersTest { + @Rule + @JvmField + val rule = ActivityTestRule(PickersActivity::class.java) + + @Test + fun testTextInputLayout() { + Screen.onScreen<PickersActivityScreen> { + selectTimeButton { + click() + } + + timePicker { + setTime(22, 4) + } + + timeText { + hasText("22:4") + } + + selectDateButton { + click() + } + + datePicker { + setDate(1955, 11, 12) + } + + dateText { + hasText("12 11 1955") + } + } + } +}
--- a/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt +++ b/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt @@ ... @@ +import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner +import androidx.test.rule.ActivityTestRule +import com.agoda.kakao.screen.Screen +import com.agoda.sample.screen.PickersActivityScreen +import org.junit.Rule +import org.junit.Test +import org.junit.runner.RunWith + +@RunWith(AndroidJUnit4ClassRunner::class) +class PickersTest { + @Rule + @JvmField + val rule = ActivityTestRule(PickersActivity::class.java) + + @Test + fun testTextInputLayout() { + Screen.onScreen<PickersActivityScreen> { + selectTimeButton { + click() + } + + timePicker { + setTime(22, 4) + } + + timeText { + hasText("22:4") + } + + selectDateButton { + click() + } + + datePicker { + setDate(1955, 11, 12) + } + + dateText { + hasText("12 11 1955") + } + } + } +}
--- a/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt +++ b/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt @@ -2 +2,44 @@ CON ADD import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner ADD import androidx.test.rule.ActivityTestRule ADD import com.agoda.kakao.screen.Screen ADD import com.agoda.sample.screen.PickersActivityScreen ADD import org.junit.Rule ADD import org.junit.Test ADD import org.junit.runner.RunWith ADD ADD @RunWith(AndroidJUnit4ClassRunner::class) ADD class PickersTest { ADD @Rule ADD @JvmField ADD val rule = ActivityTestRule(PickersActivity::class.java) ADD ADD @Test ADD fun testTextInputLayout() { ADD Screen.onScreen<PickersActivityScreen> { ADD selectTimeButton { ADD click() ADD } ADD ADD timePicker { ADD setTime(22, 4) ADD } ADD ADD timeText { ADD hasText("22:4") ADD } ADD ADD selectDateButton { ADD click() ADD } ADD ADD datePicker { ADD setDate(1955, 11, 12) ADD } ADD ADD dateText { ADD hasText("12 11 1955") ADD } ADD } ADD } ADD }
<<<<<<< SEARCH package com.agoda.sample ======= package com.agoda.sample import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner import androidx.test.rule.ActivityTestRule import com.agoda.kakao.screen.Screen import com.agoda.sample.screen.PickersActivityScreen import org.junit.Rule import org.junit.Test import org.junit.runner.RunWith @RunWith(AndroidJUnit4ClassRunner::class) class PickersTest { @Rule @JvmField val rule = ActivityTestRule(PickersActivity::class.java) @Test fun testTextInputLayout() { Screen.onScreen<PickersActivityScreen> { selectTimeButton { click() } timePicker { setTime(22, 4) } timeText { hasText("22:4") } selectDateButton { click() } datePicker { setDate(1955, 11, 12) } dateText { hasText("12 11 1955") } } } } >>>>>>> REPLACE
repo: garystafford/voter-service
commit: f48164f5b521205989b840c5dcf76e9fc43f4c56
path: src/main/java/com/example/voter/HostInfoContributor.java
lang: java
license: apache-2.0
message: Add Mongo host info to info endpoint
old_code:
```java
package com.example.voter;

import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
import org.springframework.stereotype.Component;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;

@Component
public class HostInfoContributor implements InfoContributor {

    @Override
    public void contribute(Info.Builder builder) {
        InetAddress ip = null;
        Map<String, String> hostMap = new HashMap<>();
        try {
            ip = InetAddress.getLocalHost();
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }

        hostMap.put("ipAddress", ip.getHostAddress());
        hostMap.put("hostname", ip.getHostName());
        builder.withDetail("hostInfo", hostMap);
    }
}
```
new_code:
```java
package com.example.voter;

import com.mongodb.CommandResult;
import com.mongodb.MongoClient;
import org.bson.Document;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Component;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

@Component
public class HostInfoContributor implements InfoContributor {

    @Autowired
    private MongoTemplate mongoTemplate;

    @Override
    public void contribute(Info.Builder builder) {
        InetAddress ip = null;
        Map<String, String> hostMap = new HashMap<>();
        try {
            ip = InetAddress.getLocalHost();
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }

        hostMap.put("ipAddress", ip.getHostAddress());
        hostMap.put("hostname", ip.getHostName());
        builder.withDetail("appHostInfo", hostMap);

        hostMap = new HashMap<>();
        CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }");
        hostMap.put("hostname", commandResult.getString("host"));

        MongoClient mongoClient = new MongoClient();
        Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE));

        hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString());
        builder.withDetail("mongoDbHostInfo", hostMap);
    }
}
```
n_added: 20, n_removed: 1, n_hunks: 4, change_kind: mixed
udiff, udiff-h, udiff-l, search-replace:
--- a/src/main/java/com/example/voter/HostInfoContributor.java +++ b/src/main/java/com/example/voter/HostInfoContributor.java @@ -2,4 +2,9 @@ +import com.mongodb.CommandResult; +import com.mongodb.MongoClient; +import org.bson.Document; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.info.Info; import org.springframework.boot.actuate.info.InfoContributor; +import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.stereotype.Component; @@ -8,2 +13,3 @@ import java.net.UnknownHostException; +import java.util.ArrayList; import java.util.HashMap; @@ -13,2 +19,5 @@ public class HostInfoContributor implements InfoContributor { + + @Autowired + private MongoTemplate mongoTemplate; @@ -26,3 +35,13 @@ hostMap.put("hostname", ip.getHostName()); - builder.withDetail("hostInfo", hostMap); + builder.withDetail("appHostInfo", hostMap); + + hostMap = new HashMap<>(); + CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }"); + hostMap.put("hostname", commandResult.getString("host")); + + MongoClient mongoClient = new MongoClient(); + Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE)); + + hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString()); + builder.withDetail("mongoDbHostInfo", hostMap); }
--- a/src/main/java/com/example/voter/HostInfoContributor.java +++ b/src/main/java/com/example/voter/HostInfoContributor.java @@ ... @@ +import com.mongodb.CommandResult; +import com.mongodb.MongoClient; +import org.bson.Document; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.info.Info; import org.springframework.boot.actuate.info.InfoContributor; +import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.stereotype.Component; @@ ... @@ import java.net.UnknownHostException; +import java.util.ArrayList; import java.util.HashMap; @@ ... @@ public class HostInfoContributor implements InfoContributor { + + @Autowired + private MongoTemplate mongoTemplate; @@ ... @@ hostMap.put("hostname", ip.getHostName()); - builder.withDetail("hostInfo", hostMap); + builder.withDetail("appHostInfo", hostMap); + + hostMap = new HashMap<>(); + CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }"); + hostMap.put("hostname", commandResult.getString("host")); + + MongoClient mongoClient = new MongoClient(); + Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE)); + + hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString()); + builder.withDetail("mongoDbHostInfo", hostMap); }
--- a/src/main/java/com/example/voter/HostInfoContributor.java +++ b/src/main/java/com/example/voter/HostInfoContributor.java @@ -2,4 +2,9 @@ CON ADD import com.mongodb.CommandResult; ADD import com.mongodb.MongoClient; ADD import org.bson.Document; ADD import org.springframework.beans.factory.annotation.Autowired; CON import org.springframework.boot.actuate.info.Info; CON import org.springframework.boot.actuate.info.InfoContributor; ADD import org.springframework.data.mongodb.core.MongoTemplate; CON import org.springframework.stereotype.Component; @@ -8,2 +13,3 @@ CON import java.net.UnknownHostException; ADD import java.util.ArrayList; CON import java.util.HashMap; @@ -13,2 +19,5 @@ CON public class HostInfoContributor implements InfoContributor { ADD ADD @Autowired ADD private MongoTemplate mongoTemplate; CON @@ -26,3 +35,13 @@ CON hostMap.put("hostname", ip.getHostName()); DEL builder.withDetail("hostInfo", hostMap); ADD builder.withDetail("appHostInfo", hostMap); ADD ADD hostMap = new HashMap<>(); ADD CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }"); ADD hostMap.put("hostname", commandResult.getString("host")); ADD ADD MongoClient mongoClient = new MongoClient(); ADD Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE)); ADD ADD hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString()); ADD builder.withDetail("mongoDbHostInfo", hostMap); CON }
<<<<<<< SEARCH package com.example.voter; import org.springframework.boot.actuate.info.Info; import org.springframework.boot.actuate.info.InfoContributor; import org.springframework.stereotype.Component; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.HashMap; import java.util.Map; @Component public class HostInfoContributor implements InfoContributor { @Override ======= package com.example.voter; import com.mongodb.CommandResult; import com.mongodb.MongoClient; import org.bson.Document; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.info.Info; import org.springframework.boot.actuate.info.InfoContributor; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.stereotype.Component; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; @Component public class HostInfoContributor implements InfoContributor { @Autowired private MongoTemplate mongoTemplate; @Override >>>>>>> REPLACE <<<<<<< SEARCH hostMap.put("ipAddress", ip.getHostAddress()); hostMap.put("hostname", ip.getHostName()); builder.withDetail("hostInfo", hostMap); } ======= hostMap.put("ipAddress", ip.getHostAddress()); hostMap.put("hostname", ip.getHostName()); builder.withDetail("appHostInfo", hostMap); hostMap = new HashMap<>(); CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }"); hostMap.put("hostname", commandResult.getString("host")); MongoClient mongoClient = new MongoClient(); Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE)); hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString()); builder.withDetail("mongoDbHostInfo", hostMap); } >>>>>>> REPLACE
repo: mindriot101/rust-fitsio
commit: deabbce2100cb976a5acd9f4630193c34b79b002
path: fitsio/src/columndescription.rs
lang: rust
license: apache-2.0
message: Add implementations of data description types
old_code:
```rust
/// Description for new columns
#[derive(Debug, Clone)]
pub struct ColumnDescription {
    pub name: String,

    // TODO: make this use one of the enums
    /// Type of the data, see the cfitsio documentation
    pub data_type: String,
}
```
new_code:
```rust
/// Description for new columns
#[derive(Debug, Clone)]
pub struct ColumnDescription {
    pub name: String,

    // TODO: make this use one of the enums
    /// Type of the data, see the cfitsio documentation
    pub data_type: String,
}

#[derive(Debug, Clone)]
pub struct ColumnDataDescription {
    pub repeat: usize,
    pub width: usize,
    pub typ: ColumnDataType,
}

impl ColumnDataDescription {
    pub fn new(typ: ColumnDataType) -> Self {
        ColumnDataDescription {
            repeat: 1,
            width: 1,
            typ: typ,
        }
    }

    /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if
     * this means that memory is leaked, or that destructors are needlessly called (I suspect the
     * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will
     * be called in performance-critical code, and is more likely a one-dime definition. I will
     * leave it for now - SRW 2017-03-07
     * */
    pub fn repeats(&mut self, repeat: usize) -> Self {
        // TODO check that repeat >= 1
        self.repeat = repeat;
        self.clone()
    }

    pub fn width(&mut self, width: usize) -> Self {
        // TODO check that width >= 1
        self.width = width;
        self.clone()
    }
}

#[derive(Debug, Clone, Copy)]
pub enum ColumnDataType {
    Int,
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_column_data_descriptions_builder_pattern() {
        let desc = ColumnDataDescription::new(ColumnDataType::Int)
            .width(100)
            .repeats(5);
        assert_eq!(desc.repeat, 5);
        assert_eq!(desc.width, 100);
    }
}
```
n_added: 54, n_removed: 0, n_hunks: 1, change_kind: add_only
udiff, udiff-h, udiff-l, search-replace:
--- a/fitsio/src/columndescription.rs +++ b/fitsio/src/columndescription.rs @@ -9 +9,55 @@ } + +#[derive(Debug, Clone)] +pub struct ColumnDataDescription { + pub repeat: usize, + pub width: usize, + pub typ: ColumnDataType, +} + +impl ColumnDataDescription { + pub fn new(typ: ColumnDataType) -> Self { + ColumnDataDescription { + repeat: 1, + width: 1, + typ: typ, + } + } + + /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if + * this means that memory is leaked, or that destructors are needlessly called (I suspect the + * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will + * be called in performance-critical code, and is more likely a one-dime definition. I will + * leave it for now - SRW 2017-03-07 + * */ + pub fn repeats(&mut self, repeat: usize) -> Self { + // TODO check that repeat >= 1 + self.repeat = repeat; + self.clone() + } + + pub fn width(&mut self, width: usize) -> Self { + // TODO check that width >= 1 + self.width = width; + self.clone() + } +} + +#[derive(Debug, Clone, Copy)] +pub enum ColumnDataType { + Int, +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_column_data_descriptions_builder_pattern() { + let desc = ColumnDataDescription::new(ColumnDataType::Int) + .width(100) + .repeats(5); + assert_eq!(desc.repeat, 5); + assert_eq!(desc.width, 100); + } +}
--- a/fitsio/src/columndescription.rs +++ b/fitsio/src/columndescription.rs @@ ... @@ } + +#[derive(Debug, Clone)] +pub struct ColumnDataDescription { + pub repeat: usize, + pub width: usize, + pub typ: ColumnDataType, +} + +impl ColumnDataDescription { + pub fn new(typ: ColumnDataType) -> Self { + ColumnDataDescription { + repeat: 1, + width: 1, + typ: typ, + } + } + + /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if + * this means that memory is leaked, or that destructors are needlessly called (I suspect the + * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will + * be called in performance-critical code, and is more likely a one-dime definition. I will + * leave it for now - SRW 2017-03-07 + * */ + pub fn repeats(&mut self, repeat: usize) -> Self { + // TODO check that repeat >= 1 + self.repeat = repeat; + self.clone() + } + + pub fn width(&mut self, width: usize) -> Self { + // TODO check that width >= 1 + self.width = width; + self.clone() + } +} + +#[derive(Debug, Clone, Copy)] +pub enum ColumnDataType { + Int, +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_column_data_descriptions_builder_pattern() { + let desc = ColumnDataDescription::new(ColumnDataType::Int) + .width(100) + .repeats(5); + assert_eq!(desc.repeat, 5); + assert_eq!(desc.width, 100); + } +}
--- a/fitsio/src/columndescription.rs +++ b/fitsio/src/columndescription.rs @@ -9 +9,55 @@ CON } ADD ADD #[derive(Debug, Clone)] ADD pub struct ColumnDataDescription { ADD pub repeat: usize, ADD pub width: usize, ADD pub typ: ColumnDataType, ADD } ADD ADD impl ColumnDataDescription { ADD pub fn new(typ: ColumnDataType) -> Self { ADD ColumnDataDescription { ADD repeat: 1, ADD width: 1, ADD typ: typ, ADD } ADD } ADD ADD /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if ADD * this means that memory is leaked, or that destructors are needlessly called (I suspect the ADD * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will ADD * be called in performance-critical code, and is more likely a one-dime definition. I will ADD * leave it for now - SRW 2017-03-07 ADD * */ ADD pub fn repeats(&mut self, repeat: usize) -> Self { ADD // TODO check that repeat >= 1 ADD self.repeat = repeat; ADD self.clone() ADD } ADD ADD pub fn width(&mut self, width: usize) -> Self { ADD // TODO check that width >= 1 ADD self.width = width; ADD self.clone() ADD } ADD } ADD ADD #[derive(Debug, Clone, Copy)] ADD pub enum ColumnDataType { ADD Int, ADD } ADD ADD #[cfg(test)] ADD mod test { ADD use super::*; ADD ADD #[test] ADD fn test_column_data_descriptions_builder_pattern() { ADD let desc = ColumnDataDescription::new(ColumnDataType::Int) ADD .width(100) ADD .repeats(5); ADD assert_eq!(desc.repeat, 5); ADD assert_eq!(desc.width, 100); ADD } ADD }
<<<<<<< SEARCH pub data_type: String, } ======= pub data_type: String, } #[derive(Debug, Clone)] pub struct ColumnDataDescription { pub repeat: usize, pub width: usize, pub typ: ColumnDataType, } impl ColumnDataDescription { pub fn new(typ: ColumnDataType) -> Self { ColumnDataDescription { repeat: 1, width: 1, typ: typ, } } /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if * this means that memory is leaked, or that destructors are needlessly called (I suspect the * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will * be called in performance-critical code, and is more likely a one-dime definition. I will * leave it for now - SRW 2017-03-07 * */ pub fn repeats(&mut self, repeat: usize) -> Self { // TODO check that repeat >= 1 self.repeat = repeat; self.clone() } pub fn width(&mut self, width: usize) -> Self { // TODO check that width >= 1 self.width = width; self.clone() } } #[derive(Debug, Clone, Copy)] pub enum ColumnDataType { Int, } #[cfg(test)] mod test { use super::*; #[test] fn test_column_data_descriptions_builder_pattern() { let desc = ColumnDataDescription::new(ColumnDataType::Int) .width(100) .repeats(5); assert_eq!(desc.repeat, 5); assert_eq!(desc.width, 100); } } >>>>>>> REPLACE
repo: Jitsusama/lets-do-dns
commit: f83282b1747e255d35e18e9fecad1750d1564f9e
path: do_record/record.py
lang: python
license: apache-2.0
message: Remove Code That Doesn't Have a Test
old_code:
```python
"""DigitalOcean DNS Records."""

from certbot_dns_auth.printer import printer
from do_record import http


class Record(object):
    """Handle DigitalOcean DNS records."""

    def __init__(self, api_key, domain, hostname):
        self._number = None
        self.domain = domain
        self.hostname = hostname
        self.api_key = api_key

    def create(self, value):
        """Create this record on DigitalOcean with the supplied value."""
        self._number = http.create(self, value)
        return self.number

    def delete(self, record_id=None):
        """Delete this record on DigitalOcean, identified by record_id."""
        if record_id is None:
            record_id = self.number
        http.delete(self, record_id)

    def printer(self):
        printer(self.number)

    @property
    def number(self):
        return self._number

    @number.setter
    def number(self, value):
        if self.number is None:
            self._number = value
        else:
            raise ValueError(
                'Cannot externally reset a record\'s number identifier.')
```
new_code:
```python
"""DigitalOcean DNS Records."""

from certbot_dns_auth.printer import printer
from do_record import http


class Record(object):
    """Handle DigitalOcean DNS records."""

    def __init__(self, api_key, domain, hostname):
        self._number = None
        self.domain = domain
        self.hostname = hostname
        self.api_key = api_key

    def create(self, value):
        """Create this record on DigitalOcean with the supplied value."""
        self._number = http.create(self, value)
        return self.number

    def delete(self, record_id=None):
        """Delete this record on DigitalOcean, identified by record_id."""
        if record_id is None:
            record_id = self.number
        http.delete(self, record_id)

    def printer(self):
        printer(self.number)

    @property
    def number(self):
        return self._number

    @number.setter
    def number(self, value):
        self._number = value
```
n_added: 1, n_removed: 5, n_hunks: 1, change_kind: mixed
udiff, udiff-h, udiff-l, search-replace:
--- a/do_record/record.py +++ b/do_record/record.py @@ -35,6 +35,2 @@ def number(self, value): - if self.number is None: - self._number = value - else: - raise ValueError( - 'Cannot externally reset a record\'s number identifier.') + self._number = value
--- a/do_record/record.py +++ b/do_record/record.py @@ ... @@ def number(self, value): - if self.number is None: - self._number = value - else: - raise ValueError( - 'Cannot externally reset a record\'s number identifier.') + self._number = value
--- a/do_record/record.py +++ b/do_record/record.py @@ -35,6 +35,2 @@ CON def number(self, value): DEL if self.number is None: DEL self._number = value DEL else: DEL raise ValueError( DEL 'Cannot externally reset a record\'s number identifier.') ADD self._number = value
<<<<<<< SEARCH @number.setter def number(self, value): if self.number is None: self._number = value else: raise ValueError( 'Cannot externally reset a record\'s number identifier.') ======= @number.setter def number(self, value): self._number = value >>>>>>> REPLACE
repo: jieter/python-lora
commit: e5fe2994b05ffbb5abca5641ae75114da315e888
path: setup.py
lang: python
license: mit
message: Use twine to upload package
old_code:
```python
#!/usr/bin/env python
import os
import sys

from setuptools import setup

from lora import VERSION

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()

if sys.argv[-1] == 'tag':
    os.system("git tag -a v{} -m 'tagging v{}'".format(VERSION, VERSION))
    os.system('git push && git push --tags')
    sys.exit()

setup(
    name='python-lora',
    version=VERSION,
    description='Decrypt LoRa payloads',
    url='https://github.com/jieter/python-lora',
    author='Jan Pieter Waagmeester',
    author_email='[email protected]',
    license='MIT',
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    keywords='LoRa decrypt',
    packages=['lora'],
    install_requires=[
        'cryptography==1.5.2'
    ],
)
```
new_code:
```python
#!/usr/bin/env python
import os
import sys

from setuptools import setup

from lora import VERSION

package_name = 'python-lora'

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist')
    os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION))
    sys.exit()

if sys.argv[-1] == 'tag':
    os.system("git tag -a v{} -m 'tagging v{}'".format(VERSION, VERSION))
    os.system('git push && git push --tags')
    sys.exit()

setup(
    name='python-lora',
    version=VERSION,
    description='Decrypt LoRa payloads',
    url='https://github.com/jieter/python-lora',
    author='Jan Pieter Waagmeester',
    author_email='[email protected]',
    license='MIT',
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    keywords='LoRa decrypt',
    packages=['lora'],
    install_requires=[
        'cryptography==1.5.2'
    ],
)
```
n_added: 4, n_removed: 1, n_hunks: 1, change_kind: mixed
udiff, udiff-h, udiff-l, search-replace:
--- a/setup.py +++ b/setup.py @@ -9,4 +9,7 @@ +package_name = 'python-lora' + if sys.argv[-1] == 'publish': - os.system('python setup.py sdist upload') + os.system('python setup.py sdist') + os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION)) sys.exit()
--- a/setup.py +++ b/setup.py @@ ... @@ +package_name = 'python-lora' + if sys.argv[-1] == 'publish': - os.system('python setup.py sdist upload') + os.system('python setup.py sdist') + os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION)) sys.exit()
--- a/setup.py +++ b/setup.py @@ -9,4 +9,7 @@ CON ADD package_name = 'python-lora' ADD CON if sys.argv[-1] == 'publish': DEL os.system('python setup.py sdist upload') ADD os.system('python setup.py sdist') ADD os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION)) CON sys.exit()
<<<<<<< SEARCH from lora import VERSION if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit() ======= from lora import VERSION package_name = 'python-lora' if sys.argv[-1] == 'publish': os.system('python setup.py sdist') os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION)) sys.exit() >>>>>>> REPLACE
repo: rrussell39/selenium
commit: c0470d7f93fab4bff5364a2d5e55250075cd79df
path: selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
lang: java
license: apache-2.0
message: SimonStewart: Make the selenium-backed webdriver emulate the normal webdriver's xpath mode r10674
old_code:
```java
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium.internal.selenesedriver;

import com.thoughtworks.selenium.Selenium;

import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Platform;

import java.util.HashMap;
import java.util.Map;

public class NewSession implements SeleneseFunction<Map<String, Object>> {
  public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) {
    selenium.start();

    Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
    Map<String, Object> seenCapabilities = new HashMap<String, Object>();
    seenCapabilities.put("browserName", capabilities.getBrowserName());
    seenCapabilities.put("version", capabilities.getVersion());
    seenCapabilities.put("platform", Platform.getCurrent().toString());
    seenCapabilities.put("javascriptEnabled", true);
    return seenCapabilities;
  }
}
```
new_code:
```java
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium.internal.selenesedriver;

import com.thoughtworks.selenium.Selenium;

import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Platform;

import java.util.HashMap;
import java.util.Map;

public class NewSession implements SeleneseFunction<Map<String, Object>> {
  public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) {
    selenium.start();

    // Emulate behaviour of webdriver
    selenium.useXpathLibrary("javascript-xpath");
    selenium.allowNativeXpath("true");

    Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
    Map<String, Object> seenCapabilities = new HashMap<String, Object>();
    seenCapabilities.put("browserName", capabilities.getBrowserName());
    seenCapabilities.put("version", capabilities.getVersion());
    seenCapabilities.put("platform", Platform.getCurrent().toString());
    seenCapabilities.put("javascriptEnabled", true);
    return seenCapabilities;
  }
}
```
n_added: 5, n_removed: 0, n_hunks: 1, change_kind: add_only
udiff, udiff-h, udiff-l, search-replace:
--- a/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java +++ b/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java @@ -29,2 +29,7 @@ selenium.start(); + + // Emulate behaviour of webdriver + selenium.useXpathLibrary("javascript-xpath"); + selenium.allowNativeXpath("true"); + Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
--- a/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java +++ b/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java @@ ... @@ selenium.start(); + + // Emulate behaviour of webdriver + selenium.useXpathLibrary("javascript-xpath"); + selenium.allowNativeXpath("true"); + Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
--- a/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java +++ b/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java @@ -29,2 +29,7 @@ CON selenium.start(); ADD ADD // Emulate behaviour of webdriver ADD selenium.useXpathLibrary("javascript-xpath"); ADD selenium.allowNativeXpath("true"); ADD CON Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
<<<<<<< SEARCH public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) { selenium.start(); Capabilities capabilities = (Capabilities) args.get("desiredCapabilities"); Map<String, Object> seenCapabilities = new HashMap<String, Object>(); ======= public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) { selenium.start(); // Emulate behaviour of webdriver selenium.useXpathLibrary("javascript-xpath"); selenium.allowNativeXpath("true"); Capabilities capabilities = (Capabilities) args.get("desiredCapabilities"); Map<String, Object> seenCapabilities = new HashMap<String, Object>(); >>>>>>> REPLACE
repo: endoli/hostinfo.rs
commit: eb0f2ee0f33a360cc38b086a86f27b092ea95adb
path: src/lib.rs
lang: rust
license: apache-2.0
message: Include core_foundation crate on macOS only.
old_code:
```rust
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Host Info
//!
//! This crate makes available information about the computer,
//! operating system and software that is running.
//!
//! All of the functionality is presented by traits which are
//! implemented by a `HostInfo` struct.
//!
//! ```rust
//! use hostinfo::{HostInfo, UptimeInfo};
//!
//! let hi = HostInfo::new();
//! let uptime = hi.uptime();
//! ```

#![warn(missing_docs)]
#![deny(trivial_numeric_casts, unstable_features,
        unused_import_braces, unused_qualifications)]

extern crate core_foundation;
extern crate libc;

mod sys;
pub use self::sys::HostInfo;

mod hardwareinfo;
pub use self::hardwareinfo::HardwareInfo;

mod kernellimits;
pub use self::kernellimits::KernelLimits;

mod powerinfo;
pub use self::powerinfo::{BatteryInfo, PowerInfo, PowerSource};

mod swapinfo;
pub use self::swapinfo::{SwapInfo, SwapUsage};

mod uptimeinfo;
pub use self::uptimeinfo::UptimeInfo;
```
new_code:
```rust
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Host Info
//!
//! This crate makes available information about the computer,
//! operating system and software that is running.
//!
//! All of the functionality is presented by traits which are
//! implemented by a `HostInfo` struct.
//!
//! ```rust
//! use hostinfo::{HostInfo, UptimeInfo};
//!
//! let hi = HostInfo::new();
//! let uptime = hi.uptime();
//! ```

#![warn(missing_docs)]
#![deny(trivial_numeric_casts, unstable_features,
        unused_import_braces, unused_qualifications)]

#[cfg(target_os = "macos")]
extern crate core_foundation;
extern crate libc;

mod sys;
pub use self::sys::HostInfo;

mod hardwareinfo;
pub use self::hardwareinfo::HardwareInfo;

mod kernellimits;
pub use self::kernellimits::KernelLimits;

mod powerinfo;
pub use self::powerinfo::{BatteryInfo, PowerInfo, PowerSource};

mod swapinfo;
pub use self::swapinfo::{SwapInfo, SwapUsage};

mod uptimeinfo;
pub use self::uptimeinfo::UptimeInfo;
```
n_added: 1, n_removed: 0, n_hunks: 1, change_kind: add_only
udiff, udiff-h, udiff-l, search-replace:
--- a/src/lib.rs +++ b/src/lib.rs @@ -24,2 +24,3 @@ +#[cfg(target_os = "macos")] extern crate core_foundation;
--- a/src/lib.rs +++ b/src/lib.rs @@ ... @@ +#[cfg(target_os = "macos")] extern crate core_foundation;
--- a/src/lib.rs +++ b/src/lib.rs @@ -24,2 +24,3 @@ CON ADD #[cfg(target_os = "macos")] CON extern crate core_foundation;
<<<<<<< SEARCH unused_import_braces, unused_qualifications)] extern crate core_foundation; extern crate libc; ======= unused_import_braces, unused_qualifications)] #[cfg(target_os = "macos")] extern crate core_foundation; extern crate libc; >>>>>>> REPLACE
repo: tock/libtock-rs
commit: c877a6c44324b0135017ab1e1300c0d666746c84
path: core/platform/src/syscalls_impl.rs
lang: rust
license: apache-2.0
message: Move yield_no_wait's flag init out of `unsafe`. Turns out core::mem::MaybeUninit::uninit is not `unsafe`!
old_code:
```rust
//! Implements `Syscalls` for all types that implement `RawSyscalls`.

use crate::{RawSyscalls, Syscalls, YieldNoWaitReturn};

mod yield_op {
    pub const NO_WAIT: u32 = 0;
    pub const WAIT: u32 = 1;
}

impl<S: RawSyscalls> Syscalls for S {
    // -------------------------------------------------------------------------
    // Yield
    // -------------------------------------------------------------------------

    fn yield_no_wait() -> YieldNoWaitReturn {
        unsafe {
            // flag can be uninitialized because it is not read before the yield
            // system call, and the kernel promises to only write to it (not
            // read it).
            let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();

            // flag is safe to write a YieldNoWaitReturn to, as guaranteed by
            // MaybeUninit.
            Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());

            // yield-no-wait guarantees it sets (initializes) flag before
            // returning.
            flag.assume_init()
        }
    }

    fn yield_wait() {
        // Safety: yield-wait does not return a value, which satisfies yield1's
        // requirement. The yield-wait system call cannot trigger undefined
        // behavior on its own in any other way.
        unsafe {
            Self::yield1(yield_op::WAIT as *mut ());
        }
    }
}
```
new_code:
```rust
//! Implements `Syscalls` for all types that implement `RawSyscalls`.

use crate::{RawSyscalls, Syscalls, YieldNoWaitReturn};

mod yield_op {
    pub const NO_WAIT: u32 = 0;
    pub const WAIT: u32 = 1;
}

impl<S: RawSyscalls> Syscalls for S {
    // -------------------------------------------------------------------------
    // Yield
    // -------------------------------------------------------------------------

    fn yield_no_wait() -> YieldNoWaitReturn {
        let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();

        unsafe {
            // Flag can be uninitialized here because the kernel promises to
            // only write to it, not read from it. MaybeUninit guarantees that
            // it is safe to write a YieldNoWaitReturn into it.
            Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());

            // yield-no-wait guarantees it sets (initializes) flag before
            // returning.
            flag.assume_init()
        }
    }

    fn yield_wait() {
        // Safety: yield-wait does not return a value, which satisfies yield1's
        // requirement. The yield-wait system call cannot trigger undefined
        // behavior on its own in any other way.
        unsafe {
            Self::yield1(yield_op::WAIT as *mut ());
        }
    }
}
```
n_added: 5, n_removed: 7, n_hunks: 1, change_kind: mixed
udiff, udiff-h, udiff-l, search-replace:
--- a/core/platform/src/syscalls_impl.rs +++ b/core/platform/src/syscalls_impl.rs @@ -15,10 +15,8 @@ fn yield_no_wait() -> YieldNoWaitReturn { + let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); + unsafe { - // flag can be uninitialized because it is not read before the yield - // system call, and the kernel promises to only write to it (not - // read it). - let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); - - // flag is safe to write a YieldNoWaitReturn to, as guaranteed by - // MaybeUninit. + // Flag can be uninitialized here because the kernel promises to + // only write to it, not read from it. MaybeUninit guarantees that + // it is safe to write a YieldNoWaitReturn into it. Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
--- a/core/platform/src/syscalls_impl.rs +++ b/core/platform/src/syscalls_impl.rs @@ ... @@ fn yield_no_wait() -> YieldNoWaitReturn { + let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); + unsafe { - // flag can be uninitialized because it is not read before the yield - // system call, and the kernel promises to only write to it (not - // read it). - let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); - - // flag is safe to write a YieldNoWaitReturn to, as guaranteed by - // MaybeUninit. + // Flag can be uninitialized here because the kernel promises to + // only write to it, not read from it. MaybeUninit guarantees that + // it is safe to write a YieldNoWaitReturn into it. Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
--- a/core/platform/src/syscalls_impl.rs +++ b/core/platform/src/syscalls_impl.rs @@ -15,10 +15,8 @@ CON fn yield_no_wait() -> YieldNoWaitReturn { ADD let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); ADD CON unsafe { DEL // flag can be uninitialized because it is not read before the yield DEL // system call, and the kernel promises to only write to it (not DEL // read it). DEL let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); DEL DEL // flag is safe to write a YieldNoWaitReturn to, as guaranteed by DEL // MaybeUninit. ADD // Flag can be uninitialized here because the kernel promises to ADD // only write to it, not read from it. MaybeUninit guarantees that ADD // it is safe to write a YieldNoWaitReturn into it. CON Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
<<<<<<< SEARCH fn yield_no_wait() -> YieldNoWaitReturn { unsafe { // flag can be uninitialized because it is not read before the yield // system call, and the kernel promises to only write to it (not // read it). let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); // flag is safe to write a YieldNoWaitReturn to, as guaranteed by // MaybeUninit. Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ()); ======= fn yield_no_wait() -> YieldNoWaitReturn { let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit(); unsafe { // Flag can be uninitialized here because the kernel promises to // only write to it, not read from it. MaybeUninit guarantees that // it is safe to write a YieldNoWaitReturn into it. Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ()); >>>>>>> REPLACE
repo: hughrawlinson/meyda
commit: 5ad80c5925dc6b47510fb238d33439192500602c
path: webpack.config.js
lang: javascript
license: mit
message: Change libraryTarget to UMD, remove add-module-export plugin
old_code:
```javascript
var path = require('path');
var webpack = require('webpack');

module.exports = {
  regular: {
    devtool: 'source-map',
    output: {
      filename: 'meyda.js',
      library: 'Meyda',
      libraryTarget: 'var'
    },
    module: {
      rules: [
        {
          test: /\.js$/,
          exclude: /node_modules/,
          loader: 'babel-loader',
          options: {
            presets: ['es2015'],
            plugins: [
              'add-module-exports'
            ]
          }
        }
      ]
    }
  },
  minified: {
    devtool: 'source-map',
    output: {
      filename: 'meyda.min.js',
      sourceMapFilename: 'meyda.min.map',
      library: 'Meyda',
      libraryTarget: 'var'
    },
    module: {
      rules: [
        {
          test: /\.js$/,
          exclude: /node_modules/,
          loader: 'babel-loader',
          options: {
            presets: ['es2015'],
            plugins: [
              'add-module-exports'
            ]
          }
        }
      ]
    },
    plugins: [
      new webpack.optimize.UglifyJsPlugin({
        compress: {
          warnings: true,
          drop_console: false
        },
        sourceMap: true
      })
    ]
  }
};
```
new_code:
```javascript
var path = require('path');
var webpack = require('webpack');

module.exports = {
  regular: {
    devtool: 'source-map',
    output: {
      filename: 'meyda.js',
      library: 'Meyda',
      libraryTarget: 'umd'
    },
    module: {
      rules: [
        {
          test: /\.js$/,
          exclude: /node_modules/,
          loader: 'babel-loader',
          options: {
            presets: [['es2015', {modules: false}]]
          }
        }
      ]
    }
  },
  minified: {
    devtool: 'source-map',
    output: {
      filename: 'meyda.min.js',
      sourceMapFilename: 'meyda.min.map',
      library: 'Meyda',
      libraryTarget: 'umd'
    },
    module: {
      rules: [
        {
          test: /\.js$/,
          exclude: /node_modules/,
          loader: 'babel-loader',
          options: {
            presets: [['es2015', {modules: false}]]
          }
        }
      ]
    },
    plugins: [
      new webpack.optimize.UglifyJsPlugin({
        compress: {
          warnings: true,
          drop_console: false
        },
        sourceMap: true
      })
    ]
  }
};
```
n_added: 4, n_removed: 10, n_hunks: 4, change_kind: mixed
udiff, udiff-h, udiff-l, search-replace:
--- a/webpack.config.js +++ b/webpack.config.js @@ -9,3 +9,3 @@ library: 'Meyda', - libraryTarget: 'var' + libraryTarget: 'umd' }, @@ -18,6 +18,3 @@ options: { - presets: ['es2015'], - plugins: [ - 'add-module-exports' - ] + presets: [['es2015', {modules: false}]] } @@ -33,3 +30,3 @@ library: 'Meyda', - libraryTarget: 'var' + libraryTarget: 'umd' }, @@ -42,6 +39,3 @@ options: { - presets: ['es2015'], - plugins: [ - 'add-module-exports' - ] + presets: [['es2015', {modules: false}]] }
--- a/webpack.config.js +++ b/webpack.config.js @@ ... @@ library: 'Meyda', - libraryTarget: 'var' + libraryTarget: 'umd' }, @@ ... @@ options: { - presets: ['es2015'], - plugins: [ - 'add-module-exports' - ] + presets: [['es2015', {modules: false}]] } @@ ... @@ library: 'Meyda', - libraryTarget: 'var' + libraryTarget: 'umd' }, @@ ... @@ options: { - presets: ['es2015'], - plugins: [ - 'add-module-exports' - ] + presets: [['es2015', {modules: false}]] }
--- a/webpack.config.js +++ b/webpack.config.js @@ -9,3 +9,3 @@ CON library: 'Meyda', DEL libraryTarget: 'var' ADD libraryTarget: 'umd' CON }, @@ -18,6 +18,3 @@ CON options: { DEL presets: ['es2015'], DEL plugins: [ DEL 'add-module-exports' DEL ] ADD presets: [['es2015', {modules: false}]] CON } @@ -33,3 +30,3 @@ CON library: 'Meyda', DEL libraryTarget: 'var' ADD libraryTarget: 'umd' CON }, @@ -42,6 +39,3 @@ CON options: { DEL presets: ['es2015'], DEL plugins: [ DEL 'add-module-exports' DEL ] ADD presets: [['es2015', {modules: false}]] CON }
<<<<<<< SEARCH filename: 'meyda.js', library: 'Meyda', libraryTarget: 'var' }, module: { ======= filename: 'meyda.js', library: 'Meyda', libraryTarget: 'umd' }, module: { >>>>>>> REPLACE <<<<<<< SEARCH loader: 'babel-loader', options: { presets: ['es2015'], plugins: [ 'add-module-exports' ] } } ======= loader: 'babel-loader', options: { presets: [['es2015', {modules: false}]] } } >>>>>>> REPLACE <<<<<<< SEARCH sourceMapFilename: 'meyda.min.map', library: 'Meyda', libraryTarget: 'var' }, module: { ======= sourceMapFilename: 'meyda.min.map', library: 'Meyda', libraryTarget: 'umd' }, module: { >>>>>>> REPLACE <<<<<<< SEARCH loader: 'babel-loader', options: { presets: ['es2015'], plugins: [ 'add-module-exports' ] } } ======= loader: 'babel-loader', options: { presets: [['es2015', {modules: false}]] } } >>>>>>> REPLACE
repo: jmeas/api-pls
commit: acbd1a7b82d756ffd553d8ec23c402e8e6a42571
path: server/api.js
lang: javascript
license: mit
message: Add 404 handler to unhandled requests
old_code:
```javascript
'use strict';

const express = require('express');
const routeBuilder = require('express-routebuilder');
const Resource = require('./resource');
const loadResources = require('./util/load-resources');

module.exports = function() {
  const router = express.Router();

  // This version needs to be externalized
  var apiVersion = 1;

  var resources = loadResources()
    .map(resource => new Resource({
      version: apiVersion,
      resource
    }));

  // Configure routes for our resources.
  resources.forEach(resource =>
    router.use(routeBuilder(
      express.Router(),
      resource.routes,
      resource.location
    ))
  );

  // Set up the root route that describes the available endpoints.
  router.get('/', (req, res) => {
    res.send({
      version: 'v1',
      endpoints: resources.map(resource => {
        return {
          route: resource.location,
          methods: Object.keys(resource.routes)
        };
      })
    });
  });

  return router;
};
```
new_code:
```javascript
'use strict';

const express = require('express');
const routeBuilder = require('express-routebuilder');
const Resource = require('./resource');
const serverErrors = require('./util/server-errors');
const loadResources = require('./util/load-resources');

module.exports = function() {
  const router = express.Router();

  // This version needs to be externalized
  var apiVersion = 1;

  var resources = loadResources()
    .map(resource => new Resource({
      version: apiVersion,
      resource
    }));

  // Configure routes for our resources.
  resources.forEach(resource =>
    router.use(routeBuilder(
      express.Router(),
      resource.routes,
      resource.location
    ))
  );

  // Set up the root route that describes the available endpoints.
  router.get('/', (req, res) => {
    res.send({
      version: 'v1',
      endpoints: resources.map(resource => {
        return {
          route: resource.location,
          methods: Object.keys(resource.routes)
        };
      })
    });
  });

  // All other requests get a default 404 error.
  router.get('*', (req, res) => {
    res.status(serverErrors.notFound.code).send({
      errors: [serverErrors.notFound.body()]
    });
  })

  return router;
};
```
n_added: 8, n_removed: 0, n_hunks: 2, change_kind: add_only
udiff, udiff-h, udiff-l, search-replace:
--- a/server/api.js +++ b/server/api.js @@ -5,2 +5,3 @@ const Resource = require('./resource'); +const serverErrors = require('./util/server-errors'); const loadResources = require('./util/load-resources'); @@ -41,2 +42,9 @@ + // All other requests get a default 404 error. + router.get('*', (req, res) => { + res.status(serverErrors.notFound.code).send({ + errors: [serverErrors.notFound.body()] + }); + }) + return router;
--- a/server/api.js +++ b/server/api.js @@ ... @@ const Resource = require('./resource'); +const serverErrors = require('./util/server-errors'); const loadResources = require('./util/load-resources'); @@ ... @@ + // All other requests get a default 404 error. + router.get('*', (req, res) => { + res.status(serverErrors.notFound.code).send({ + errors: [serverErrors.notFound.body()] + }); + }) + return router;
--- a/server/api.js +++ b/server/api.js @@ -5,2 +5,3 @@ CON const Resource = require('./resource'); ADD const serverErrors = require('./util/server-errors'); CON const loadResources = require('./util/load-resources'); @@ -41,2 +42,9 @@ CON ADD // All other requests get a default 404 error. ADD router.get('*', (req, res) => { ADD res.status(serverErrors.notFound.code).send({ ADD errors: [serverErrors.notFound.body()] ADD }); ADD }) ADD CON return router;
<<<<<<< SEARCH const routeBuilder = require('express-routebuilder'); const Resource = require('./resource'); const loadResources = require('./util/load-resources'); ======= const routeBuilder = require('express-routebuilder'); const Resource = require('./resource'); const serverErrors = require('./util/server-errors'); const loadResources = require('./util/load-resources'); >>>>>>> REPLACE <<<<<<< SEARCH }); return router; }; ======= }); // All other requests get a default 404 error. router.get('*', (req, res) => { res.status(serverErrors.notFound.code).send({ errors: [serverErrors.notFound.body()] }); }) return router; }; >>>>>>> REPLACE
repo: sigp/lighthouse
commit: dc0696754be06da921b2536af1771f9bdbc9d200
path: beacon_node/beacon_chain/test_harness/tests/chain.rs
lang: rust
license: apache-2.0
message: Raise log level on test_harness tests
old_code:
```rust
use env_logger::{Builder, Env};
use log::debug;
use test_harness::BeaconChainHarness;
use types::ChainSpec;

#[test]
fn it_can_build_on_genesis_block() {
    Builder::from_env(Env::default().default_filter_or("trace")).init();

    let spec = ChainSpec::few_validators();
    let validator_count = 8;

    let mut harness = BeaconChainHarness::new(spec, validator_count as usize);

    harness.advance_chain_with_block();
}

#[test]
#[ignore]
fn it_can_produce_past_first_epoch_boundary() {
    Builder::from_env(Env::default().default_filter_or("debug")).init();

    let spec = ChainSpec::few_validators();
    let validator_count = 8;

    debug!("Starting harness build...");

    let mut harness = BeaconChainHarness::new(spec, validator_count);

    debug!("Harness built, tests starting..");

    let blocks = harness.spec.epoch_length * 2 + 1;

    for i in 0..blocks {
        harness.advance_chain_with_block();
        debug!("Produced block {}/{}.", i + 1, blocks);
    }

    let dump = harness.chain_dump().expect("Chain dump failed.");

    assert_eq!(dump.len() as u64, blocks + 1); // + 1 for genesis block.

    harness.dump_to_file("/tmp/chaindump.json".to_string(), &dump);
}
```
new_code:
```rust
use env_logger::{Builder, Env};
use log::debug;
use test_harness::BeaconChainHarness;
use types::ChainSpec;

#[test]
fn it_can_build_on_genesis_block() {
    Builder::from_env(Env::default().default_filter_or("info")).init();

    let spec = ChainSpec::few_validators();
    let validator_count = 8;

    let mut harness = BeaconChainHarness::new(spec, validator_count as usize);

    harness.advance_chain_with_block();
}

#[test]
#[ignore]
fn it_can_produce_past_first_epoch_boundary() {
    Builder::from_env(Env::default().default_filter_or("info")).init();

    let spec = ChainSpec::few_validators();
    let validator_count = 8;

    debug!("Starting harness build...");

    let mut harness = BeaconChainHarness::new(spec, validator_count);

    debug!("Harness built, tests starting..");

    let blocks = harness.spec.epoch_length * 2 + 1;

    for i in 0..blocks {
        harness.advance_chain_with_block();
        debug!("Produced block {}/{}.", i + 1, blocks);
    }

    let dump = harness.chain_dump().expect("Chain dump failed.");

    assert_eq!(dump.len() as u64, blocks + 1); // + 1 for genesis block.

    harness.dump_to_file("/tmp/chaindump.json".to_string(), &dump);
}
```
n_added: 2, n_removed: 2, n_hunks: 2, change_kind: mixed
udiff, udiff-h, udiff-l, search-replace:
--- a/beacon_node/beacon_chain/test_harness/tests/chain.rs +++ b/beacon_node/beacon_chain/test_harness/tests/chain.rs @@ -7,3 +7,3 @@ fn it_can_build_on_genesis_block() { - Builder::from_env(Env::default().default_filter_or("trace")).init(); + Builder::from_env(Env::default().default_filter_or("info")).init(); @@ -20,3 +20,3 @@ fn it_can_produce_past_first_epoch_boundary() { - Builder::from_env(Env::default().default_filter_or("debug")).init(); + Builder::from_env(Env::default().default_filter_or("info")).init();
--- a/beacon_node/beacon_chain/test_harness/tests/chain.rs +++ b/beacon_node/beacon_chain/test_harness/tests/chain.rs @@ ... @@ fn it_can_build_on_genesis_block() { - Builder::from_env(Env::default().default_filter_or("trace")).init(); + Builder::from_env(Env::default().default_filter_or("info")).init(); @@ ... @@ fn it_can_produce_past_first_epoch_boundary() { - Builder::from_env(Env::default().default_filter_or("debug")).init(); + Builder::from_env(Env::default().default_filter_or("info")).init();
--- a/beacon_node/beacon_chain/test_harness/tests/chain.rs +++ b/beacon_node/beacon_chain/test_harness/tests/chain.rs @@ -7,3 +7,3 @@ CON fn it_can_build_on_genesis_block() { DEL Builder::from_env(Env::default().default_filter_or("trace")).init(); ADD Builder::from_env(Env::default().default_filter_or("info")).init(); CON @@ -20,3 +20,3 @@ CON fn it_can_produce_past_first_epoch_boundary() { DEL Builder::from_env(Env::default().default_filter_or("debug")).init(); ADD Builder::from_env(Env::default().default_filter_or("info")).init(); CON
<<<<<<< SEARCH #[test] fn it_can_build_on_genesis_block() { Builder::from_env(Env::default().default_filter_or("trace")).init(); let spec = ChainSpec::few_validators(); ======= #[test] fn it_can_build_on_genesis_block() { Builder::from_env(Env::default().default_filter_or("info")).init(); let spec = ChainSpec::few_validators(); >>>>>>> REPLACE <<<<<<< SEARCH #[ignore] fn it_can_produce_past_first_epoch_boundary() { Builder::from_env(Env::default().default_filter_or("debug")).init(); let spec = ChainSpec::few_validators(); ======= #[ignore] fn it_can_produce_past_first_epoch_boundary() { Builder::from_env(Env::default().default_filter_or("info")).init(); let spec = ChainSpec::few_validators(); >>>>>>> REPLACE
repo: dkrathi457/app
commit: 870800d5240e9fdc2f610204fd495c69bfae7f63
path: lib/forgot/forgot-view.js
lang: javascript
license: mit
message: Remove unneeded switchOff function from forgot view
old_code:
```javascript
/*
 * Module dependencies.
 */

var template = require('./forgot-form');
var t = require('t');
var FormView = require('form-view');
var page = require('page');

/**
 * Expose ForgotView.
 */

module.exports = ForgotView;

/**
 * Forgot password view
 *
 * @return {ForgotView} `ForgotView` instance.
 * @api public
 */

function ForgotView() {
  if (!(this instanceof ForgotView)) {
    return new ForgotView();
  };

  FormView.call(this, template);
}

/**
 * Extend from `FormView`
 */

FormView(ForgotView);

ForgotView.prototype.switchOn = function() {
  this.on('success', this.bound('onsuccess'));
  this.on('error', this.bound('onerror'));
};

ForgotView.prototype.switchoff = function() {
  this.off('success', this.bound('onsuccess'));
  this.off('error', this.bound('onerror'));
};

/**
 * Show success message
 */

ForgotView.prototype.onsuccess = function() {
  var form = this.find('form');
  var explanation = this.find('p.explanation-message');
  var success = this.find('p.success-message');
  form.addClass('hide');
  explanation.addClass('hide');
  success.removeClass('hide');
}

/**
 * Handle errors
 */

ForgotView.prototype.onerror = function(error) {
  if ('notvalidated' === error.status) page('/signup/resend-validation-email');
};
```
new_code:
```javascript
/*
 * Module dependencies.
 */

var template = require('./forgot-form');
var t = require('t');
var FormView = require('form-view');
var page = require('page');

/**
 * Expose ForgotView.
 */

module.exports = ForgotView;

/**
 * Forgot password view
 *
 * @return {ForgotView} `ForgotView` instance.
 * @api public
 */

function ForgotView() {
  if (!(this instanceof ForgotView)) {
    return new ForgotView();
  };

  FormView.call(this, template);
}

/**
 * Extend from `FormView`
 */

FormView(ForgotView);

ForgotView.prototype.switchOn = function() {
  this.on('success', this.bound('onsuccess'));
  this.on('error', this.bound('onerror'));
};

/**
 * Show success message
 */

ForgotView.prototype.onsuccess = function() {
  var form = this.find('form');
  var explanation = this.find('p.explanation-message');
  var success = this.find('p.success-message');
  form.addClass('hide');
  explanation.addClass('hide');
  success.removeClass('hide');
}

/**
 * Handle errors
 */

ForgotView.prototype.onerror = function(error) {
  if ('notvalidated' === error.status) page('/signup/resend-validation-email');
};
```
n_added: 0, n_removed: 5, n_hunks: 1, change_kind: del_only
udiff, udiff-h, udiff-l, search-replace:
--- a/lib/forgot/forgot-view.js +++ b/lib/forgot/forgot-view.js @@ -41,7 +41,2 @@ -ForgotView.prototype.switchoff = function() { - this.off('success', this.bound('onsuccess')); - this.off('error', this.bound('onerror')); -}; - /**
--- a/lib/forgot/forgot-view.js +++ b/lib/forgot/forgot-view.js @@ ... @@ -ForgotView.prototype.switchoff = function() { - this.off('success', this.bound('onsuccess')); - this.off('error', this.bound('onerror')); -}; - /**
--- a/lib/forgot/forgot-view.js +++ b/lib/forgot/forgot-view.js @@ -41,7 +41,2 @@ CON DEL ForgotView.prototype.switchoff = function() { DEL this.off('success', this.bound('onsuccess')); DEL this.off('error', this.bound('onerror')); DEL }; DEL CON /**
<<<<<<< SEARCH }; ForgotView.prototype.switchoff = function() { this.off('success', this.bound('onsuccess')); this.off('error', this.bound('onerror')); }; /** * Show success message ======= }; /** * Show success message >>>>>>> REPLACE
elpassion/android-commons
2ca7bff437f0bf9b90d871689346d774df1a78e0
espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
kotlin
apache-2.0
Add does not have ChildWithTextAssertionsTests
package com.elpassion.android.commons.espresso

import android.os.Bundle
import android.support.test.rule.ActivityTestRule
import android.widget.Button
import android.widget.FrameLayout
import org.junit.Rule
import org.junit.Test

class HasChildWithTextAssertionsTest {

    @JvmField @Rule
    val activityRule = ActivityTestRule(Activity::class.java)

    @Test
    fun shouldConfirmHasChildWithText() {
        onId(anId).hasChildWithText(firstText)
    }

    @Test
    fun shouldConfirmHasChildWithTextRes() {
        onId(anId).hasChildWithText(secondTextRes)
    }

    class Activity : android.app.Activity() {
        override fun onCreate(savedInstanceState: Bundle?) {
            super.onCreate(savedInstanceState)
            setContentView(FrameLayout(this).apply {
                id = anId
                addView(Button(this.context).apply { text = firstText })
                addView(Button(this.context).apply { setText(secondTextRes) })
            })
        }
    }

    companion object {
        private val anId = 124
        private val firstText = "text"
        private val secondTextRes = R.string.app_name
    }
}
package com.elpassion.android.commons.espresso

import android.os.Bundle
import android.support.test.rule.ActivityTestRule
import android.widget.Button
import android.widget.FrameLayout
import org.junit.Rule
import org.junit.Test

class HasChildWithTextAssertionsTest {

    @JvmField @Rule
    val activityRule = ActivityTestRule(Activity::class.java)

    @Test
    fun shouldConfirmHasChildWithText() {
        onId(anId).hasChildWithText(firstText)
    }

    @Test
    fun shouldConfirmHasChildWithTextRes() {
        onId(anId).hasChildWithText(secondTextRes)
    }

    @Test
    fun shouldConfirmDoesNotHaveChildWithText() {
        onId(anId).doesNotHaveChildWithText("not existing text")
    }

    @Test
    fun shouldConfirmDoesNotHaveChildWithTextRes() {
        onId(anId).doesNotHaveChildWithText(123)
    }

    class Activity : android.app.Activity() {
        override fun onCreate(savedInstanceState: Bundle?) {
            super.onCreate(savedInstanceState)
            setContentView(FrameLayout(this).apply {
                id = anId
                addView(Button(this.context).apply { text = firstText })
                addView(Button(this.context).apply { setText(secondTextRes) })
            })
        }
    }

    companion object {
        private val anId = 124
        private val firstText = "text"
        private val secondTextRes = R.string.app_name
    }
}
10
0
1
add_only
--- a/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt +++ b/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt @@ -24,2 +24,12 @@ + @Test + fun shouldConfirmDoesNotHaveChildWithText() { + onId(anId).doesNotHaveChildWithText("not existing text") + } + + @Test + fun shouldConfirmDoesNotHaveChildWithTextRes() { + onId(anId).doesNotHaveChildWithText(123) + } + class Activity : android.app.Activity() {
--- a/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt +++ b/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt @@ ... @@ + @Test + fun shouldConfirmDoesNotHaveChildWithText() { + onId(anId).doesNotHaveChildWithText("not existing text") + } + + @Test + fun shouldConfirmDoesNotHaveChildWithTextRes() { + onId(anId).doesNotHaveChildWithText(123) + } + class Activity : android.app.Activity() {
--- a/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt +++ b/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt @@ -24,2 +24,12 @@ CON ADD @Test ADD fun shouldConfirmDoesNotHaveChildWithText() { ADD onId(anId).doesNotHaveChildWithText("not existing text") ADD } ADD ADD @Test ADD fun shouldConfirmDoesNotHaveChildWithTextRes() { ADD onId(anId).doesNotHaveChildWithText(123) ADD } ADD CON class Activity : android.app.Activity() {
<<<<<<< SEARCH } class Activity : android.app.Activity() { override fun onCreate(savedInstanceState: Bundle?) { ======= } @Test fun shouldConfirmDoesNotHaveChildWithText() { onId(anId).doesNotHaveChildWithText("not existing text") } @Test fun shouldConfirmDoesNotHaveChildWithTextRes() { onId(anId).doesNotHaveChildWithText(123) } class Activity : android.app.Activity() { override fun onCreate(savedInstanceState: Bundle?) { >>>>>>> REPLACE
vespa-engine/vespa
9d46e55257521413d7171958d674a800332840e6
config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
java
apache-2.0
Add method to get SystemName.Environment.RegionName
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.config.provision.zone;

import com.yahoo.config.provision.CloudName;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.RegionName;
import com.yahoo.config.provision.SystemName;

/**
 * @author hakonhall
 */
public interface ZoneApi {

    SystemName getSystemName();

    ZoneId getId();

    /**
     * Returns the virtual ID of this zone. For ordinary zones this is the same as {@link ZoneApi#getId()}, for a
     * system represented as a zone this is a fixed ID that is independent of the actual zone ID.
     */
    default ZoneId getVirtualId() {
        return getId();
    }

    default Environment getEnvironment() { return getId().environment(); }

    default RegionName getRegionName() { return getId().region(); }

    CloudName getCloudName();

    /** Returns the region name within the cloud, e.g. 'us-east-1' in AWS */
    String getCloudNativeRegionName();
}
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.config.provision.zone;

import com.yahoo.config.provision.CloudName;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.RegionName;
import com.yahoo.config.provision.SystemName;

/**
 * @author hakonhall
 */
public interface ZoneApi {

    SystemName getSystemName();

    ZoneId getId();

    /** Returns the SYSTEM.ENVIRONMENT.REGION string. */
    default String getFullName() {
        return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value();
    }

    /**
     * Returns the virtual ID of this zone. For ordinary zones this is the same as {@link ZoneApi#getId()}, for a
     * system represented as a zone this is a fixed ID that is independent of the actual zone ID.
     */
    default ZoneId getVirtualId() {
        return getId();
    }

    default Environment getEnvironment() { return getId().environment(); }

    default RegionName getRegionName() { return getId().region(); }

    CloudName getCloudName();

    /** Returns the region name within the cloud, e.g. 'us-east-1' in AWS */
    String getCloudNativeRegionName();
}
5
0
1
add_only
--- a/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java +++ b/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java @@ -16,2 +16,7 @@ ZoneId getId(); + + /** Returns the SYSTEM.ENVIRONMENT.REGION string. */ + default String getFullName() { + return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value(); + }
--- a/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java +++ b/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java @@ ... @@ ZoneId getId(); + + /** Returns the SYSTEM.ENVIRONMENT.REGION string. */ + default String getFullName() { + return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value(); + }
--- a/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java +++ b/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java @@ -16,2 +16,7 @@ CON ZoneId getId(); ADD ADD /** Returns the SYSTEM.ENVIRONMENT.REGION string. */ ADD default String getFullName() { ADD return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value(); ADD } CON
<<<<<<< SEARCH ZoneId getId(); /** ======= ZoneId getId(); /** Returns the SYSTEM.ENVIRONMENT.REGION string. */ default String getFullName() { return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value(); } /** >>>>>>> REPLACE
jwilm/strava-rs
e59ffa425f1614ce226c66b4d6c341f3126a7062
src/api.rs
rust
mpl-2.0
Add paging methods to Paginated
use accesstoken::AccessToken;

#[doc(hidden)]
pub fn v3(token: &AccessToken, url: String) -> String {
    format!("https://www.strava.com/api/v3/{}?access_token={}", url, token.get())
}

#[derive(Debug)]
pub struct Paginated<T> {
    page: i32,
    per_page: i32,
    url: String,
    data: Vec<T>
}

impl<T> Paginated<T> {
    pub fn new(url: String, data: Vec<T>) -> Paginated<T> {
        Paginated {
            page: 1,
            per_page: 30,
            url: url,
            data: data,
        }
    }
}
use accesstoken::AccessToken;

#[doc(hidden)]
pub fn v3(token: &AccessToken, url: String) -> String {
    format!("https://www.strava.com/api/v3/{}?access_token={}", url, token.get())
}

/// Wrapper for endpoints that paginate
///
/// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for
/// fetching the next page and checking if more pages are available.
#[derive(Debug)]
pub struct Paginated<T> {
    page: usize,
    per_page: usize,
    url: String,
    data: Vec<T>
}

impl<T> Paginated<T> {
    pub fn new(url: String, data: Vec<T>) -> Paginated<T> {
        Paginated {
            page: 1,
            per_page: 30,
            url: url,
            data: data,
        }
    }

    /// Get the next page of results
    pub fn next(&self) -> Option<Paginated<T>> {
        unimplemented!();
    }

    /// Check if this is the last page
    pub fn last_page(&self) -> bool {
        self.per_page != self.data.len()
    }
}

#[cfg(test)]
mod paginated_tests {
    use super::Paginated;

    #[test]
    fn last_page() {
        let vec = (0..30).collect::<Vec<u8>>();
        let pager = Paginated::new("test".to_string(), vec);
        println!("{:?}", pager);
        assert_eq!(pager.last_page(), false);
    }
}
29
2
2
mixed
--- a/src/api.rs +++ b/src/api.rs @@ -7,6 +7,10 @@ +/// Wrapper for endpoints that paginate +/// +/// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for +/// fetching the next page and checking if more pages are available. #[derive(Debug)] pub struct Paginated<T> { - page: i32, - per_page: i32, + page: usize, + per_page: usize, url: String, @@ -24,2 +28,25 @@ } + + /// Get the next page of results + pub fn next(&self) -> Option<Paginated<T>> { + unimplemented!(); + } + + /// Check if this is the last page + pub fn last_page(&self) -> bool { + self.per_page != self.data.len() + } } + +#[cfg(test)] +mod paginated_tests { + use super::Paginated; + + #[test] + fn last_page() { + let vec = (0..30).collect::<Vec<u8>>(); + let pager = Paginated::new("test".to_string(), vec); + println!("{:?}", pager); + assert_eq!(pager.last_page(), false); + } +}
--- a/src/api.rs +++ b/src/api.rs @@ ... @@ +/// Wrapper for endpoints that paginate +/// +/// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for +/// fetching the next page and checking if more pages are available. #[derive(Debug)] pub struct Paginated<T> { - page: i32, - per_page: i32, + page: usize, + per_page: usize, url: String, @@ ... @@ } + + /// Get the next page of results + pub fn next(&self) -> Option<Paginated<T>> { + unimplemented!(); + } + + /// Check if this is the last page + pub fn last_page(&self) -> bool { + self.per_page != self.data.len() + } } + +#[cfg(test)] +mod paginated_tests { + use super::Paginated; + + #[test] + fn last_page() { + let vec = (0..30).collect::<Vec<u8>>(); + let pager = Paginated::new("test".to_string(), vec); + println!("{:?}", pager); + assert_eq!(pager.last_page(), false); + } +}
--- a/src/api.rs +++ b/src/api.rs @@ -7,6 +7,10 @@ CON ADD /// Wrapper for endpoints that paginate ADD /// ADD /// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for ADD /// fetching the next page and checking if more pages are available. CON #[derive(Debug)] CON pub struct Paginated<T> { DEL page: i32, DEL per_page: i32, ADD page: usize, ADD per_page: usize, CON url: String, @@ -24,2 +28,25 @@ CON } ADD ADD /// Get the next page of results ADD pub fn next(&self) -> Option<Paginated<T>> { ADD unimplemented!(); ADD } ADD ADD /// Check if this is the last page ADD pub fn last_page(&self) -> bool { ADD self.per_page != self.data.len() ADD } CON } ADD ADD #[cfg(test)] ADD mod paginated_tests { ADD use super::Paginated; ADD ADD #[test] ADD fn last_page() { ADD let vec = (0..30).collect::<Vec<u8>>(); ADD let pager = Paginated::new("test".to_string(), vec); ADD println!("{:?}", pager); ADD assert_eq!(pager.last_page(), false); ADD } ADD }
<<<<<<< SEARCH } #[derive(Debug)] pub struct Paginated<T> { page: i32, per_page: i32, url: String, data: Vec<T> ======= } /// Wrapper for endpoints that paginate /// /// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for /// fetching the next page and checking if more pages are available. #[derive(Debug)] pub struct Paginated<T> { page: usize, per_page: usize, url: String, data: Vec<T> >>>>>>> REPLACE <<<<<<< SEARCH } } } ======= } } /// Get the next page of results pub fn next(&self) -> Option<Paginated<T>> { unimplemented!(); } /// Check if this is the last page pub fn last_page(&self) -> bool { self.per_page != self.data.len() } } #[cfg(test)] mod paginated_tests { use super::Paginated; #[test] fn last_page() { let vec = (0..30).collect::<Vec<u8>>(); let pager = Paginated::new("test".to_string(), vec); println!("{:?}", pager); assert_eq!(pager.last_page(), false); } } >>>>>>> REPLACE
AcapellaSoft/Aconite
68efdf458c0985c48ff8a1d9f3d38bed007f5632
aconite-core/src/io/aconite/utils/Async.kt
kotlin
mit
Use of COROUTINE_SUSPENDED from Kotlin internals
package io.aconite.utils

import java.lang.reflect.InvocationTargetException
import kotlin.coroutines.experimental.Continuation
import kotlin.coroutines.experimental.CoroutineContext
import kotlin.coroutines.experimental.suspendCoroutine
import kotlin.reflect.KFunction

/**
 * This object can be used as the return value of the async function to indicate
 * that function was suspended. This works only with function [asyncCall].
 */
val COROUTINE_SUSPENDED = Any()

private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> {
    override val context: CoroutineContext
        get() = c.context

    override fun resume(value: R) {
        if (value === COROUTINE_SUSPENDED) return
        c.resume(value)
    }

    override fun resumeWithException(exception: Throwable) {
        if (exception === COROUTINE_SUSPENDED) return
        c.resumeWithException(exception)
    }
}

/**
 * Extension for calling asynchronous functions by reflection.
 * @receiver the called function
 * @param[args] arguments of the called function
 * @return result of the called function
 */
suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c ->
    val cc = MyContinuation(c)
    try {
        val r = call(*args, cc)
        cc.resume(r)
    } catch (ex: InvocationTargetException) {
        throw ex.cause ?: ex
    }
}
package io.aconite.utils

import java.lang.reflect.InvocationTargetException
import kotlin.coroutines.experimental.suspendCoroutine
import kotlin.reflect.KFunction

/**
 * This object can be used as the return value of the async function to indicate
 * that function was suspended.
 * TODO: find better way to use suspend with reflection
 */
val COROUTINE_SUSPENDED: Any = {
    val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt")
    val field = cls.getDeclaredField("COROUTINE_SUSPENDED")
    field.isAccessible = true
    field.get(null)
}()

/**
 * Extension for calling asynchronous functions by reflection.
 * @receiver the called function
 * @param[args] arguments of the called function
 * @return result of the called function
 */
suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c ->
    try {
        val r = call(*args, c)
        if (r !== COROUTINE_SUSPENDED) c.resume(r)
    } catch (ex: InvocationTargetException) {
        throw ex.cause ?: ex
    }
}
10
22
3
mixed
--- a/aconite-core/src/io/aconite/utils/Async.kt +++ b/aconite-core/src/io/aconite/utils/Async.kt @@ -3,4 +3,2 @@ import java.lang.reflect.InvocationTargetException -import kotlin.coroutines.experimental.Continuation -import kotlin.coroutines.experimental.CoroutineContext import kotlin.coroutines.experimental.suspendCoroutine @@ -10,20 +8,11 @@ * This object can be used as the return value of the async function to indicate - * that function was suspended. This works only with function [asyncCall]. + * that function was suspended. + * TODO: find better way to use suspend with reflection */ -val COROUTINE_SUSPENDED = Any() - -private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> { - override val context: CoroutineContext - get() = c.context - - override fun resume(value: R) { - if (value === COROUTINE_SUSPENDED) return - c.resume(value) - } - - override fun resumeWithException(exception: Throwable) { - if (exception === COROUTINE_SUSPENDED) return - c.resumeWithException(exception) - } -} +val COROUTINE_SUSPENDED: Any = { + val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt") + val field = cls.getDeclaredField("COROUTINE_SUSPENDED") + field.isAccessible = true + field.get(null) +}() @@ -36,6 +25,5 @@ suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c -> - val cc = MyContinuation(c) try { - val r = call(*args, cc) - cc.resume(r) + val r = call(*args, c) + if (r !== COROUTINE_SUSPENDED) c.resume(r) } catch (ex: InvocationTargetException) {
--- a/aconite-core/src/io/aconite/utils/Async.kt +++ b/aconite-core/src/io/aconite/utils/Async.kt @@ ... @@ import java.lang.reflect.InvocationTargetException -import kotlin.coroutines.experimental.Continuation -import kotlin.coroutines.experimental.CoroutineContext import kotlin.coroutines.experimental.suspendCoroutine @@ ... @@ * This object can be used as the return value of the async function to indicate - * that function was suspended. This works only with function [asyncCall]. + * that function was suspended. + * TODO: find better way to use suspend with reflection */ -val COROUTINE_SUSPENDED = Any() - -private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> { - override val context: CoroutineContext - get() = c.context - - override fun resume(value: R) { - if (value === COROUTINE_SUSPENDED) return - c.resume(value) - } - - override fun resumeWithException(exception: Throwable) { - if (exception === COROUTINE_SUSPENDED) return - c.resumeWithException(exception) - } -} +val COROUTINE_SUSPENDED: Any = { + val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt") + val field = cls.getDeclaredField("COROUTINE_SUSPENDED") + field.isAccessible = true + field.get(null) +}() @@ ... @@ suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c -> - val cc = MyContinuation(c) try { - val r = call(*args, cc) - cc.resume(r) + val r = call(*args, c) + if (r !== COROUTINE_SUSPENDED) c.resume(r) } catch (ex: InvocationTargetException) {
--- a/aconite-core/src/io/aconite/utils/Async.kt +++ b/aconite-core/src/io/aconite/utils/Async.kt @@ -3,4 +3,2 @@ CON import java.lang.reflect.InvocationTargetException DEL import kotlin.coroutines.experimental.Continuation DEL import kotlin.coroutines.experimental.CoroutineContext CON import kotlin.coroutines.experimental.suspendCoroutine @@ -10,20 +8,11 @@ CON * This object can be used as the return value of the async function to indicate DEL * that function was suspended. This works only with function [asyncCall]. ADD * that function was suspended. ADD * TODO: find better way to use suspend with reflection CON */ DEL val COROUTINE_SUSPENDED = Any() DEL DEL private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> { DEL override val context: CoroutineContext DEL get() = c.context DEL DEL override fun resume(value: R) { DEL if (value === COROUTINE_SUSPENDED) return DEL c.resume(value) DEL } DEL DEL override fun resumeWithException(exception: Throwable) { DEL if (exception === COROUTINE_SUSPENDED) return DEL c.resumeWithException(exception) DEL } DEL } ADD val COROUTINE_SUSPENDED: Any = { ADD val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt") ADD val field = cls.getDeclaredField("COROUTINE_SUSPENDED") ADD field.isAccessible = true ADD field.get(null) ADD }() CON @@ -36,6 +25,5 @@ CON suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c -> DEL val cc = MyContinuation(c) CON try { DEL val r = call(*args, cc) DEL cc.resume(r) ADD val r = call(*args, c) ADD if (r !== COROUTINE_SUSPENDED) c.resume(r) CON } catch (ex: InvocationTargetException) {
<<<<<<< SEARCH import java.lang.reflect.InvocationTargetException import kotlin.coroutines.experimental.Continuation import kotlin.coroutines.experimental.CoroutineContext import kotlin.coroutines.experimental.suspendCoroutine import kotlin.reflect.KFunction /** * This object can be used as the return value of the async function to indicate * that function was suspended. This works only with function [asyncCall]. */ val COROUTINE_SUSPENDED = Any() private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> { override val context: CoroutineContext get() = c.context override fun resume(value: R) { if (value === COROUTINE_SUSPENDED) return c.resume(value) } override fun resumeWithException(exception: Throwable) { if (exception === COROUTINE_SUSPENDED) return c.resumeWithException(exception) } } /** ======= import java.lang.reflect.InvocationTargetException import kotlin.coroutines.experimental.suspendCoroutine import kotlin.reflect.KFunction /** * This object can be used as the return value of the async function to indicate * that function was suspended. * TODO: find better way to use suspend with reflection */ val COROUTINE_SUSPENDED: Any = { val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt") val field = cls.getDeclaredField("COROUTINE_SUSPENDED") field.isAccessible = true field.get(null) }() /** >>>>>>> REPLACE <<<<<<< SEARCH */ suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c -> val cc = MyContinuation(c) try { val r = call(*args, cc) cc.resume(r) } catch (ex: InvocationTargetException) { throw ex.cause ?: ex ======= */ suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c -> try { val r = call(*args, c) if (r !== COROUTINE_SUSPENDED) c.resume(r) } catch (ex: InvocationTargetException) { throw ex.cause ?: ex >>>>>>> REPLACE
Reinaesaya/OUIRL-ChatBot
396ab20874a0c3492482a8ae03fd7d61980917a5
chatterbot/adapters/logic/closest_match.py
python
bsd-3-clause
Update closest match adapter docstring.
# -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz

from .base_match import BaseMatchAdapter


class ClosestMatchAdapter(BaseMatchAdapter):
    """
    The ClosestMatchAdapter logic adapter creates a response by
    using fuzzywuzzy's process class to extract the most similar
    response to the input. This adapter selects a response to an
    input statement by selecting the closest known matching
    statement based on the Levenshtein Distance between the text
    of each statement.
    """

    def get(self, input_statement):
        """
        Takes a statement string and a list of statement strings.
        Returns the closest matching statement from the list.
        """
        statement_list = self.context.storage.get_response_statements()

        if not statement_list:
            if self.has_storage_context:
                # Use a randomly picked statement
                self.logger.info(
                    u'No statements have known responses. ' +
                    u'Choosing a random response to return.'
                )
                return 0, self.context.storage.get_random()
            else:
                raise self.EmptyDatasetException()

        confidence = -1
        closest_match = input_statement

        # Find the closest matching known statement
        for statement in statement_list:
            ratio = fuzz.ratio(input_statement.text.lower(), statement.text.lower())

            if ratio > confidence:
                confidence = ratio
                closest_match = statement

        # Convert the confidence integer to a percent
        confidence /= 100.0

        return confidence, closest_match
# -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz
from .base_match import BaseMatchAdapter


class ClosestMatchAdapter(BaseMatchAdapter):
    """
    The ClosestMatchAdapter logic adapter selects a known response
    to an input by searching for a known statement that most closely
    matches the input based on the Levenshtein Distance between the text
    of each statement.
    """

    def get(self, input_statement):
        """
        Takes a statement string and a list of statement strings.
        Returns the closest matching statement from the list.
        """
        statement_list = self.context.storage.get_response_statements()

        if not statement_list:
            if self.has_storage_context:
                # Use a randomly picked statement
                self.logger.info(
                    u'No statements have known responses. ' +
                    u'Choosing a random response to return.'
                )
                return 0, self.context.storage.get_random()
            else:
                raise self.EmptyDatasetException()

        confidence = -1
        closest_match = input_statement

        # Find the closest matching known statement
        for statement in statement_list:
            ratio = fuzz.ratio(input_statement.text.lower(), statement.text.lower())

            if ratio > confidence:
                confidence = ratio
                closest_match = statement

        # Convert the confidence integer to a percent
        confidence /= 100.0

        return confidence, closest_match
3
6
2
mixed
--- a/chatterbot/adapters/logic/closest_match.py +++ b/chatterbot/adapters/logic/closest_match.py @@ -2,3 +2,2 @@ from fuzzywuzzy import fuzz - from .base_match import BaseMatchAdapter @@ -8,7 +7,5 @@ """ - The ClosestMatchAdapter logic adapter creates a response by - using fuzzywuzzy's process class to extract the most similar - response to the input. This adapter selects a response to an - input statement by selecting the closest known matching - statement based on the Levenshtein Distance between the text + The ClosestMatchAdapter logic adapter selects a known response + to an input by searching for a known statement that most closely + matches the input based on the Levenshtein Distance between the text of each statement.
--- a/chatterbot/adapters/logic/closest_match.py +++ b/chatterbot/adapters/logic/closest_match.py @@ ... @@ from fuzzywuzzy import fuzz - from .base_match import BaseMatchAdapter @@ ... @@ """ - The ClosestMatchAdapter logic adapter creates a response by - using fuzzywuzzy's process class to extract the most similar - response to the input. This adapter selects a response to an - input statement by selecting the closest known matching - statement based on the Levenshtein Distance between the text + The ClosestMatchAdapter logic adapter selects a known response + to an input by searching for a known statement that most closely + matches the input based on the Levenshtein Distance between the text of each statement.
--- a/chatterbot/adapters/logic/closest_match.py +++ b/chatterbot/adapters/logic/closest_match.py @@ -2,3 +2,2 @@ CON from fuzzywuzzy import fuzz DEL CON from .base_match import BaseMatchAdapter @@ -8,7 +7,5 @@ CON """ DEL The ClosestMatchAdapter logic adapter creates a response by DEL using fuzzywuzzy's process class to extract the most similar DEL response to the input. This adapter selects a response to an DEL input statement by selecting the closest known matching DEL statement based on the Levenshtein Distance between the text ADD The ClosestMatchAdapter logic adapter selects a known response ADD to an input by searching for a known statement that most closely ADD matches the input based on the Levenshtein Distance between the text CON of each statement.
<<<<<<< SEARCH # -*- coding: utf-8 -*- from fuzzywuzzy import fuzz from .base_match import BaseMatchAdapter class ClosestMatchAdapter(BaseMatchAdapter): """ The ClosestMatchAdapter logic adapter creates a response by using fuzzywuzzy's process class to extract the most similar response to the input. This adapter selects a response to an input statement by selecting the closest known matching statement based on the Levenshtein Distance between the text of each statement. """ ======= # -*- coding: utf-8 -*- from fuzzywuzzy import fuzz from .base_match import BaseMatchAdapter class ClosestMatchAdapter(BaseMatchAdapter): """ The ClosestMatchAdapter logic adapter selects a known response to an input by searching for a known statement that most closely matches the input based on the Levenshtein Distance between the text of each statement. """ >>>>>>> REPLACE
mchung94/latest-versions
b0814b95ea854f7b3f0b9db48ae9beee078c2a30
versions/software/openjdk.py
python
mit
Update OpenJDK version to support both 8 and 9.
import re

from versions.software.utils import get_command_stderr, get_soup, \
    get_text_between


def name():
    """Return the precise name for the software."""
    return 'Zulu OpenJDK'


def installed_version():
    """Return the installed version of the jdk, or None if not installed."""
    try:
        version_string = get_command_stderr(('java', '-version'))
        return get_text_between(version_string, '"', '"')
    except FileNotFoundError:
        pass


def downloadable_version(url):
    """Strip the version out of the Zulu OpenJDK manual download link."""
    # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
    filename = url[url.rfind('/') + 1:]
    jdk_version = get_text_between(filename, '-jdk', '-')
    version, update = jdk_version.rsplit('.', 1)
    return f'1.{version}_{update}'


def latest_version():
    """Return the latest version of Zulu OpenJDK available for download."""
    soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
    if soup:
        div = soup.find('div', class_='latest_area')
        if div:
            zip_filename = re.compile('\.zip$')
            tag = div.find('a', class_='r-download', href=zip_filename)
            if tag:
                return downloadable_version(tag.attrs['href'])
    return 'Unknown'
import re

from versions.software.utils import get_command_stderr, get_soup, \
    get_text_between


def name():
    """Return the precise name for the software."""
    return 'Zulu OpenJDK'


def installed_version():
    """Return the installed version of the jdk, or None if not installed."""
    try:
        version_string = get_command_stderr(('java', '-version'))
        # "1.8.0_162" or "9.0.4.1" for example
        return get_text_between(version_string, '"', '"')
    except FileNotFoundError:
        pass


def latest_version():
    """Return the latest version of Zulu OpenJDK available for download."""
    installed = installed_version()
    soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
    if soup:
        zip_filename = re.compile('\.zip$')
        for tag in soup.find_all('a', class_='r-download', href=zip_filename):
            filename = tag.attrs['href']
            zulu = get_text_between(filename, 'bin/zulu', '-')
            jdk = get_text_between(filename, 'jdk', '-')
            if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
                return zulu
            elif installed[0] == '1' and jdk[0] == installed[2]:
                version, update = jdk.rsplit('.', 1)
                return f'1.{version}_{update}'
    return 'Unknown'
12
15
2
mixed
--- a/versions/software/openjdk.py +++ b/versions/software/openjdk.py @@ -15,2 +15,3 @@ version_string = get_command_stderr(('java', '-version')) + # "1.8.0_162" or "9.0.4.1" for example return get_text_between(version_string, '"', '"') @@ -20,21 +21,17 @@ -def downloadable_version(url): - """Strip the version out of the Zulu OpenJDK manual download link.""" - # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip - filename = url[url.rfind('/') + 1:] - jdk_version = get_text_between(filename, '-jdk', '-') - version, update = jdk_version.rsplit('.', 1) - return f'1.{version}_{update}' - - def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" + installed = installed_version() soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: - div = soup.find('div', class_='latest_area') - if div: - zip_filename = re.compile('\.zip$') - tag = div.find('a', class_='r-download', href=zip_filename) - if tag: - return downloadable_version(tag.attrs['href']) + zip_filename = re.compile('\.zip$') + for tag in soup.find_all('a', class_='r-download', href=zip_filename): + filename = tag.attrs['href'] + zulu = get_text_between(filename, 'bin/zulu', '-') + jdk = get_text_between(filename, 'jdk', '-') + if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): + return zulu + elif installed[0] == '1' and jdk[0] == installed[2]: + version, update = jdk.rsplit('.', 1) + return f'1.{version}_{update}' return 'Unknown'
--- a/versions/software/openjdk.py +++ b/versions/software/openjdk.py @@ ... @@ version_string = get_command_stderr(('java', '-version')) + # "1.8.0_162" or "9.0.4.1" for example return get_text_between(version_string, '"', '"') @@ ... @@ -def downloadable_version(url): - """Strip the version out of the Zulu OpenJDK manual download link.""" - # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip - filename = url[url.rfind('/') + 1:] - jdk_version = get_text_between(filename, '-jdk', '-') - version, update = jdk_version.rsplit('.', 1) - return f'1.{version}_{update}' - - def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" + installed = installed_version() soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: - div = soup.find('div', class_='latest_area') - if div: - zip_filename = re.compile('\.zip$') - tag = div.find('a', class_='r-download', href=zip_filename) - if tag: - return downloadable_version(tag.attrs['href']) + zip_filename = re.compile('\.zip$') + for tag in soup.find_all('a', class_='r-download', href=zip_filename): + filename = tag.attrs['href'] + zulu = get_text_between(filename, 'bin/zulu', '-') + jdk = get_text_between(filename, 'jdk', '-') + if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): + return zulu + elif installed[0] == '1' and jdk[0] == installed[2]: + version, update = jdk.rsplit('.', 1) + return f'1.{version}_{update}' return 'Unknown'
--- a/versions/software/openjdk.py +++ b/versions/software/openjdk.py @@ -15,2 +15,3 @@ CON version_string = get_command_stderr(('java', '-version')) ADD # "1.8.0_162" or "9.0.4.1" for example CON return get_text_between(version_string, '"', '"') @@ -20,21 +21,17 @@ CON DEL def downloadable_version(url): DEL """Strip the version out of the Zulu OpenJDK manual download link.""" DEL # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip DEL filename = url[url.rfind('/') + 1:] DEL jdk_version = get_text_between(filename, '-jdk', '-') DEL version, update = jdk_version.rsplit('.', 1) DEL return f'1.{version}_{update}' DEL DEL CON def latest_version(): CON """Return the latest version of Zulu OpenJDK available for download.""" ADD installed = installed_version() CON soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') CON if soup: DEL div = soup.find('div', class_='latest_area') DEL if div: DEL zip_filename = re.compile('\.zip$') DEL tag = div.find('a', class_='r-download', href=zip_filename) DEL if tag: DEL return downloadable_version(tag.attrs['href']) ADD zip_filename = re.compile('\.zip$') ADD for tag in soup.find_all('a', class_='r-download', href=zip_filename): ADD filename = tag.attrs['href'] ADD zulu = get_text_between(filename, 'bin/zulu', '-') ADD jdk = get_text_between(filename, 'jdk', '-') ADD if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): ADD return zulu ADD elif installed[0] == '1' and jdk[0] == installed[2]: ADD version, update = jdk.rsplit('.', 1) ADD return f'1.{version}_{update}' CON return 'Unknown'
<<<<<<< SEARCH try: version_string = get_command_stderr(('java', '-version')) return get_text_between(version_string, '"', '"') except FileNotFoundError: pass def downloadable_version(url): """Strip the version out of the Zulu OpenJDK manual download link.""" # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip filename = url[url.rfind('/') + 1:] jdk_version = get_text_between(filename, '-jdk', '-') version, update = jdk_version.rsplit('.', 1) return f'1.{version}_{update}' def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: div = soup.find('div', class_='latest_area') if div: zip_filename = re.compile('\.zip$') tag = div.find('a', class_='r-download', href=zip_filename) if tag: return downloadable_version(tag.attrs['href']) return 'Unknown' ======= try: version_string = get_command_stderr(('java', '-version')) # "1.8.0_162" or "9.0.4.1" for example return get_text_between(version_string, '"', '"') except FileNotFoundError: pass def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" installed = installed_version() soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: zip_filename = re.compile('\.zip$') for tag in soup.find_all('a', class_='r-download', href=zip_filename): filename = tag.attrs['href'] zulu = get_text_between(filename, 'bin/zulu', '-') jdk = get_text_between(filename, 'jdk', '-') if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): return zulu elif installed[0] == '1' and jdk[0] == installed[2]: version, update = jdk.rsplit('.', 1) return f'1.{version}_{update}' return 'Unknown' >>>>>>> REPLACE
google/evergreen-checker
6b15019a023f26228cf0baeb0e4b1a052987e6ab
build.gradle.kts
kotlin
apache-2.0
Update `kotlin_version` from `1.4.21` to `1.5.0`
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    extra["kotlin_version"] = "1.4.21"

    repositories {
        google()
        jcenter()
    }
    dependencies {
        classpath("com.android.tools.build:gradle:4.1.2")
        classpath(kotlin("gradle-plugin", version = rootProject.extra["kotlin_version"] as String?))

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()
    }
}

tasks.withType<KotlinCompile> {
    kotlinOptions {
        jvmTarget = "1.8"
    }
}
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    extra["kotlin_version"] = "1.5.0"

    repositories {
        google()
        jcenter()
    }
    dependencies {
        classpath("com.android.tools.build:gradle:4.1.2")
        classpath(kotlin("gradle-plugin", version = rootProject.extra["kotlin_version"] as String?))

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()
    }
}

tasks.withType<KotlinCompile> {
    kotlinOptions {
        jvmTarget = "1.8"
    }
}
1
1
1
mixed
--- a/build.gradle.kts +++ b/build.gradle.kts @@ -19,3 +19,3 @@ buildscript { - extra["kotlin_version"] = "1.4.21" + extra["kotlin_version"] = "1.5.0"
--- a/build.gradle.kts +++ b/build.gradle.kts @@ ... @@ buildscript { - extra["kotlin_version"] = "1.4.21" + extra["kotlin_version"] = "1.5.0"
--- a/build.gradle.kts +++ b/build.gradle.kts @@ -19,3 +19,3 @@ CON buildscript { DEL extra["kotlin_version"] = "1.4.21" ADD extra["kotlin_version"] = "1.5.0" CON
<<<<<<< SEARCH buildscript { extra["kotlin_version"] = "1.4.21" repositories { ======= buildscript { extra["kotlin_version"] = "1.5.0" repositories { >>>>>>> REPLACE
dtolnay/syn
d9e61a5ebe2d3bc0b1077f304a31bf377d9c83d0
tests/test_ty.rs
rust
apache-2.0
Add test for Type containing macro metavariable
use syn::Type;

#[test]
fn test_mut_self() {
    syn::parse_str::<Type>("fn(mut self)").unwrap();
    syn::parse_str::<Type>("fn(mut self: ())").unwrap();
    syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
    syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
    syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
#[macro_use]
mod macros;

use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Type;

#[test]
fn test_mut_self() {
    syn::parse_str::<Type>("fn(mut self)").unwrap();
    syn::parse_str::<Type>("fn(mut self: ())").unwrap();
    syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
    syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
    syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}

#[test]
fn test_macro_variable_type() {
    // mimics the token stream corresponding to `$ty<T>`
    let tokens = TokenStream::from_iter(vec![
        TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
        TokenTree::Punct(Punct::new('<', Spacing::Alone)),
        TokenTree::Ident(Ident::new("T", Span::call_site())),
        TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    ]);

    snapshot!(tokens as Type, @r###"
    Type::Path {
        path: Path {
            segments: [
                PathSegment {
                    ident: "ty",
                    arguments: PathArguments::AngleBracketed {
                        args: [
                            Type(Type::Path {
                                path: Path {
                                    segments: [
                                        PathSegment {
                                            ident: "T",
                                            arguments: None,
                                        },
                                    ],
                                },
                            }),
                        ],
                    },
                },
            ],
        },
    }
    "###);
}
43
0
2
add_only
--- a/tests/test_ty.rs +++ b/tests/test_ty.rs @@ -1 +1,7 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use quote::quote; +use std::iter::FromIterator; use syn::Type; @@ -10 +16,38 @@ } + +#[test] +fn test_macro_variable_type() { + // mimics the token stream corresponding to `$ty<T>` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { ty })), + TokenTree::Punct(Punct::new('<', Spacing::Alone)), + TokenTree::Ident(Ident::new("T", Span::call_site())), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + ]); + + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "ty", + arguments: PathArguments::AngleBracketed { + args: [ + Type(Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }), + ], + }, + }, + ], + }, + } + "###); +}
--- a/tests/test_ty.rs +++ b/tests/test_ty.rs @@ ... @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use quote::quote; +use std::iter::FromIterator; use syn::Type; @@ ... @@ } + +#[test] +fn test_macro_variable_type() { + // mimics the token stream corresponding to `$ty<T>` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { ty })), + TokenTree::Punct(Punct::new('<', Spacing::Alone)), + TokenTree::Ident(Ident::new("T", Span::call_site())), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + ]); + + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "ty", + arguments: PathArguments::AngleBracketed { + args: [ + Type(Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }), + ], + }, + }, + ], + }, + } + "###); +}
--- a/tests/test_ty.rs +++ b/tests/test_ty.rs @@ -1 +1,7 @@ ADD #[macro_use] ADD mod macros; ADD ADD use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; ADD use quote::quote; ADD use std::iter::FromIterator; CON use syn::Type; @@ -10 +16,38 @@ CON } ADD ADD #[test] ADD fn test_macro_variable_type() { ADD // mimics the token stream corresponding to `$ty<T>` ADD let tokens = TokenStream::from_iter(vec![ ADD TokenTree::Group(Group::new(Delimiter::None, quote! { ty })), ADD TokenTree::Punct(Punct::new('<', Spacing::Alone)), ADD TokenTree::Ident(Ident::new("T", Span::call_site())), ADD TokenTree::Punct(Punct::new('>', Spacing::Alone)), ADD ]); ADD ADD snapshot!(tokens as Type, @r###" ADD Type::Path { ADD path: Path { ADD segments: [ ADD PathSegment { ADD ident: "ty", ADD arguments: PathArguments::AngleBracketed { ADD args: [ ADD Type(Type::Path { ADD path: Path { ADD segments: [ ADD PathSegment { ADD ident: "T", ADD arguments: None, ADD }, ADD ], ADD }, ADD }), ADD ], ADD }, ADD }, ADD ], ADD }, ADD } ADD "###); ADD }
<<<<<<< SEARCH use syn::Type; ======= #[macro_use] mod macros; use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; use quote::quote; use std::iter::FromIterator; use syn::Type; >>>>>>> REPLACE <<<<<<< SEARCH syn::parse_str::<Type>("fn(mut self::T)").unwrap_err(); } ======= syn::parse_str::<Type>("fn(mut self::T)").unwrap_err(); } #[test] fn test_macro_variable_type() { // mimics the token stream corresponding to `$ty<T>` let tokens = TokenStream::from_iter(vec![ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })), TokenTree::Punct(Punct::new('<', Spacing::Alone)), TokenTree::Ident(Ident::new("T", Span::call_site())), TokenTree::Punct(Punct::new('>', Spacing::Alone)), ]); snapshot!(tokens as Type, @r###" Type::Path { path: Path { segments: [ PathSegment { ident: "ty", arguments: PathArguments::AngleBracketed { args: [ Type(Type::Path { path: Path { segments: [ PathSegment { ident: "T", arguments: None, }, ], }, }), ], }, }, ], }, } "###); } >>>>>>> REPLACE
kvakil/venus
a70c35802d37fff0efdf24235963269731ca93d9
src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
kotlin
mit
Add terminate with exit code
package venus.simulator.impls

import venus.riscv.Instruction
import venus.simulator.Simulator
import venus.simulator.InstructionImplementation
import venus.glue.Renderer

object ECALLImpl : InstructionImplementation {
    override operator fun invoke(inst: Instruction, sim: Simulator) {
        val which = sim.getReg(10)
        when (which) {
            1 -> { // print integer
                val arg = sim.getReg(11)
                Renderer.printConsole(arg)
            }
            4 -> { // print string
                var arg = sim.getReg(11)
                var c = sim.loadByte(arg)
                arg++
                while (c != 0) {
                    Renderer.printConsole(c.toChar())
                    c = sim.loadByte(arg)
                    arg++
                }
            }
            9 -> { // malloc
                var bytes = sim.getReg(11)
                if (bytes < 0) return
                sim.setReg(10, sim.getHeapEnd())
                sim.addHeapSpace(bytes)
            }
            10 -> { // exit
                sim.setPC(Int.MAX_VALUE)
                return
            }
            11 -> { // print char
                var arg = sim.getReg(11)
                Renderer.printConsole(arg.toChar())
            }
            else -> {
                Renderer.printConsole("Invalid ecall ${which}")
            }
        }
        sim.incrementPC(inst.length)
    }
}
package venus.simulator.impls

import venus.riscv.Instruction
import venus.simulator.Simulator
import venus.simulator.InstructionImplementation
import venus.glue.Renderer

object ECALLImpl : structionImplementation {
    override operator fun invoke(inst: Instruction, sim: Simulator) {
        val which = sim.getReg(10)
        when (which) {
            1 -> { // print integer
                val arg = sim.getReg(11)
                Renderer.printConsole(arg)
            }
            4 -> { // print string
                var arg = sim.getReg(11)
                var c = sim.loadByte(arg)
                arg++
                while (c != 0) {
                    Renderer.printConsole(c.toChar())
                    c = sim.loadByte(arg)
                    arg++
                }
            }
            9 -> { // malloc
                var bytes = sim.getReg(11)
                if (bytes < 0) return
                sim.setReg(10, sim.getHeapEnd())
                sim.addHeapSpace(bytes)
            }
            10 -> { // exit
                sim.setPC(Int.MAX_VALUE)
                return
            }
            11 -> { // print char
                var arg = sim.getReg(11)
                Renderer.printConsole(arg.toChar())
            }
            17 -> { // terminate with error code
                sim.setPC(Int.MAX_VALUE)
                val retVal = sim.getReg(11)
                Renderer.printConsole("Exited with error code $retVal\n")
            }
            else -> {
                Renderer.printConsole("Invalid ecall ${which}")
            }
        }
        sim.incrementPC(inst.length)
    }
}
6
1
2
mixed
--- a/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt +++ b/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt @@ -7,3 +7,3 @@ -object ECALLImpl : InstructionImplementation { +object ECALLImpl : structionImplementation { override operator fun invoke(inst: Instruction, sim: Simulator) { @@ -39,2 +39,7 @@ } + 17 -> { // terminate with error code + sim.setPC(Int.MAX_VALUE) + val retVal = sim.getReg(11) + Renderer.printConsole("Exited with error code $retVal\n") + } else -> {
--- a/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt +++ b/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt @@ ... @@ -object ECALLImpl : InstructionImplementation { +object ECALLImpl : structionImplementation { override operator fun invoke(inst: Instruction, sim: Simulator) { @@ ... @@ } + 17 -> { // terminate with error code + sim.setPC(Int.MAX_VALUE) + val retVal = sim.getReg(11) + Renderer.printConsole("Exited with error code $retVal\n") + } else -> {
--- a/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt +++ b/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt @@ -7,3 +7,3 @@ CON DEL object ECALLImpl : InstructionImplementation { ADD object ECALLImpl : structionImplementation { CON override operator fun invoke(inst: Instruction, sim: Simulator) { @@ -39,2 +39,7 @@ CON } ADD 17 -> { // terminate with error code ADD sim.setPC(Int.MAX_VALUE) ADD val retVal = sim.getReg(11) ADD Renderer.printConsole("Exited with error code $retVal\n") ADD } CON else -> {
<<<<<<< SEARCH import venus.glue.Renderer object ECALLImpl : InstructionImplementation { override operator fun invoke(inst: Instruction, sim: Simulator) { val which = sim.getReg(10) ======= import venus.glue.Renderer object ECALLImpl : structionImplementation { override operator fun invoke(inst: Instruction, sim: Simulator) { val which = sim.getReg(10) >>>>>>> REPLACE <<<<<<< SEARCH Renderer.printConsole(arg.toChar()) } else -> { Renderer.printConsole("Invalid ecall ${which}") ======= Renderer.printConsole(arg.toChar()) } 17 -> { // terminate with error code sim.setPC(Int.MAX_VALUE) val retVal = sim.getReg(11) Renderer.printConsole("Exited with error code $retVal\n") } else -> { Renderer.printConsole("Invalid ecall ${which}") >>>>>>> REPLACE
carnesen/mathjs-app
b710962f59a12613deb5e9197bd166dad19161b1
webpack.config.babel.js
javascript
mit
Purge style loader from webpack config
import path from 'path'
import webpack from 'webpack'

const { NODE_ENV } = process.env
const production = NODE_ENV === 'production'

const plugins = [
  new webpack.DefinePlugin({
    'process.env.NODE_ENV': JSON.stringify(NODE_ENV)
  })
]

let extension = '.js'

if (production) {
  plugins.push(new webpack.optimize.UglifyJsPlugin())
  extension = '.min.js'
}

module.exports = [
  {
    entry: [
      'babel-polyfill',
      'whatwg-fetch',
      path.join(__dirname, 'src', 'browser', 'index.js')
    ],
    output: {
      path: path.join(__dirname, 'dist'),
      filename: `bundle${extension}`
    },
    plugins,
    module: {
      loaders: [
        {
          test: /.js$/,
          loader: 'babel-loader',
          exclude: /node_modules/
        },
        { test: /\.json$/, loader: 'json-loader' },
        { test: /\.css$/, loader: 'style-loader!css-loader' }
      ]
    }
  }
]
import path from 'path'
import webpack from 'webpack'

const { NODE_ENV } = process.env
const production = NODE_ENV === 'production'

const plugins = [
  new webpack.DefinePlugin({
    'process.env.NODE_ENV': JSON.stringify(NODE_ENV)
  })
]

let extension = '.js'

if (production) {
  plugins.push(new webpack.optimize.UglifyJsPlugin())
  extension = '.min.js'
}

module.exports = [
  {
    entry: [
      'babel-polyfill',
      'whatwg-fetch',
      path.join(__dirname, 'src', 'browser', 'index.js')
    ],
    output: {
      path: path.join(__dirname, 'dist'),
      filename: `bundle${extension}`
    },
    plugins,
    module: {
      loaders: [
        {
          test: /.js$/,
          loader: 'babel-loader',
          exclude: /node_modules/
        },
        { test: /\.json$/, loader: 'json-loader' }
      ]
    }
  }
]
1
2
1
mixed
--- a/webpack.config.babel.js +++ b/webpack.config.babel.js @@ -37,4 +37,3 @@ }, - { test: /\.json$/, loader: 'json-loader' }, - { test: /\.css$/, loader: 'style-loader!css-loader' } + { test: /\.json$/, loader: 'json-loader' } ]
--- a/webpack.config.babel.js +++ b/webpack.config.babel.js @@ ... @@ }, - { test: /\.json$/, loader: 'json-loader' }, - { test: /\.css$/, loader: 'style-loader!css-loader' } + { test: /\.json$/, loader: 'json-loader' } ]
--- a/webpack.config.babel.js +++ b/webpack.config.babel.js @@ -37,4 +37,3 @@ CON }, DEL { test: /\.json$/, loader: 'json-loader' }, DEL { test: /\.css$/, loader: 'style-loader!css-loader' } ADD { test: /\.json$/, loader: 'json-loader' } CON ]
<<<<<<< SEARCH exclude: /node_modules/ }, { test: /\.json$/, loader: 'json-loader' }, { test: /\.css$/, loader: 'style-loader!css-loader' } ] } ======= exclude: /node_modules/ }, { test: /\.json$/, loader: 'json-loader' } ] } >>>>>>> REPLACE
rbartoli/react-boilerplate
ba9b586cd4567b739cbe94e51b47e1f933ae1242
webpack.config.js
javascript
mit
Change log level to info
var path = require('path');

var devConfig = {
  context: path.join(__dirname, '/app'),
  entry: [
    './app.js'
  ],
  output: {
    path: path.join(__dirname, '/build/'),
    publicPath: '/public/assets/js/',
    filename: 'app.js',
  },
  devtool: 'eval-source-map',
  devServer: {
    contentBase: 'public',
    historyApiFallback: false,

    stats: 'errors-only'
  },
  module: {
    loaders: [
      {
        test: /\.js$/,
        exclude: /node_modules/,
        loaders: ['react-hot', 'babel'],
      },
      {
        test: /\.css$/,
        exclude: /node_modules/,
        loaders: ['style', 'css']
      },
      {
        test: /\.scss$/,
        exclude: /node_modules/,
        loaders: ['style', 'css', 'sass']
      },
      {
        test: /\.(jpg|png|ttf|eot|woff|woff2|svg)$/,
        exclude: /node_modules/,
        loader: 'url?limit=100000'
      }
    ]
  }
}

if (process.env.NODE_ENV === 'production') {
  devConfig.devtool = '';
  devConfig.devServer = {};
};

module.exports = devConfig;
var path = require('path');

var devConfig = {
  context: path.join(__dirname, '/app'),
  entry: [
    './app.js'
  ],
  output: {
    path: path.join(__dirname, '/build/'),
    publicPath: '/public/assets/js/',
    filename: 'app.js',
  },
  devtool: 'eval-source-map',
  devServer: {
    contentBase: 'public',
    historyApiFallback: false
  },
  module: {
    loaders: [
      {
        test: /\.js$/,
        exclude: /node_modules/,
        loaders: ['react-hot', 'babel'],
      },
      {
        test: /\.css$/,
        exclude: /node_modules/,
        loaders: ['style', 'css']
      },
      {
        test: /\.scss$/,
        exclude: /node_modules/,
        loaders: ['style', 'css', 'sass']
      },
      {
        test: /\.(jpg|png|ttf|eot|woff|woff2|svg)$/,
        exclude: /node_modules/,
        loader: 'url?limit=100000'
      }
    ]
  }
}

if (process.env.NODE_ENV === 'production') {
  devConfig.devtool = '';
  devConfig.devServer = {};
};

module.exports = devConfig;
1
3
1
mixed
--- a/webpack.config.js +++ b/webpack.config.js @@ -15,5 +15,3 @@ contentBase: 'public', - historyApiFallback: false, - - stats: 'errors-only' + historyApiFallback: false },
--- a/webpack.config.js +++ b/webpack.config.js @@ ... @@ contentBase: 'public', - historyApiFallback: false, - - stats: 'errors-only' + historyApiFallback: false },
--- a/webpack.config.js +++ b/webpack.config.js @@ -15,5 +15,3 @@ CON contentBase: 'public', DEL historyApiFallback: false, DEL DEL stats: 'errors-only' ADD historyApiFallback: false CON },
<<<<<<< SEARCH devServer: { contentBase: 'public', historyApiFallback: false, stats: 'errors-only' }, module: { ======= devServer: { contentBase: 'public', historyApiFallback: false }, module: { >>>>>>> REPLACE
androidx/androidx
7439b9a7bdf87fca17db852766191cee05fceb37
lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
kotlin
apache-2.0
Fix broken Lint version check for lifecycle

Bug: 189211535
Test: ApiLintVersionsTest#versionsCheck
Change-Id: I6b87f10803b615ab6a4e305883b7a5d9f118b77f
/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.lifecycle.lint

import com.android.tools.lint.client.api.LintClient
import com.android.tools.lint.detector.api.CURRENT_API
import org.junit.Assert
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4

@RunWith(JUnit4::class)
class ApiLintVersionsTest {

    @Test
    fun versionsCheck() {
        LintClient.clientName = LintClient.CLIENT_UNIT_TESTS
        val registry = LifecycleRuntimeIssueRegistry()
        // we hardcode version registry.api to the version that is used to run tests
        Assert.assertEquals(CURRENT_API, registry.api)

        // Intentionally fails in IDE, because we use different API version in
        // studio and command line
        Assert.assertEquals(3, registry.minApi)
    }
}
/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.lifecycle.lint

import com.android.tools.lint.client.api.LintClient
import com.android.tools.lint.detector.api.CURRENT_API
import org.junit.Assert
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4

@RunWith(JUnit4::class)
class ApiLintVersionsTest {

    @Test
    fun versionsCheck() {
        LintClient.clientName = LintClient.CLIENT_UNIT_TESTS
        val registry = LifecycleRuntimeIssueRegistry()
        // we hardcode version registry.api to the version that is used to run tests
        Assert.assertEquals(CURRENT_API, registry.api)

        // Intentionally fails in IDE, because we use different API version in
        // studio and command line
        Assert.assertEquals(8, registry.minApi)
    }
}
1
1
1
mixed
--- a/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt +++ b/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt @@ -37,3 +37,3 @@ // studio and command line - Assert.assertEquals(3, registry.minApi) + Assert.assertEquals(8, registry.minApi) }
--- a/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt +++ b/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt @@ ... @@ // studio and command line - Assert.assertEquals(3, registry.minApi) + Assert.assertEquals(8, registry.minApi) }
--- a/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt +++ b/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt @@ -37,3 +37,3 @@ CON // studio and command line DEL Assert.assertEquals(3, registry.minApi) ADD Assert.assertEquals(8, registry.minApi) CON }
<<<<<<< SEARCH // Intentionally fails in IDE, because we use different API version in // studio and command line Assert.assertEquals(3, registry.minApi) } } ======= // Intentionally fails in IDE, because we use different API version in // studio and command line Assert.assertEquals(8, registry.minApi) } } >>>>>>> REPLACE
dirvine/rust-utp
3a8fa9325a54a4ca4837cc63a577f4cf9c78056c
src/lib.rs
rust
apache-2.0
Add example to module documentation.
//! Implementation of the Micro Transport Protocol.[^spec] //! //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html // __________ ____ ____ // /_ __/ __ \/ __ \/ __ \ // / / / / / / / / / / / / // / / / /_/ / /_/ / /_/ / // /_/ \____/_____/\____/ // // - Lossy UDP socket for testing purposes: send and receive ops are wrappers // that stochastically drop or reorder packets. // - Sending FIN on drop // - Handle packet loss // - Path MTU discovery (RFC4821) #![deny(missing_docs)] #![feature(core, collections, io)] extern crate rand; extern crate time; #[macro_use] extern crate log; // Public API pub use socket::UtpSocket; pub use stream::UtpStream; mod util; mod bit_iterator; mod packet; mod socket; mod stream;
//! Implementation of the Micro Transport Protocol.[^spec] //! //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html //! # Examples //! //! ``` //! extern crate utp; //! //! use utp::UtpStream; //! use std::io::{Read, Write}; //! //! fn main() { //! // Connect to an hypothetical local server running on port 8080 //! let addr = "127.0.0.1:8080"; //! let mut stream = match UtpStream::connect(addr) { //! Ok(stream) => stream, //! Err(e) => panic!("{}", e), //! }; //! //! // Send a string //! match stream.write("Hi there!".as_bytes()) { //! Ok(_) => (), //! Err(e) => println!("Write failed with {}", e) //! } //! //! // Close the stream //! match stream.close() { //! Ok(()) => println!("Connection closed"), //! Err(e) => println!("{}", e) //! } //! } //! ``` // __________ ____ ____ // /_ __/ __ \/ __ \/ __ \ // / / / / / / / / / / / / // / / / /_/ / /_/ / /_/ / // /_/ \____/_____/\____/ // // - Lossy UDP socket for testing purposes: send and receive ops are wrappers // that stochastically drop or reorder packets. // - Sending FIN on drop // - Handle packet loss // - Path MTU discovery (RFC4821) #![deny(missing_docs)] #![feature(core, collections, io)] extern crate rand; extern crate time; #[macro_use] extern crate log; // Public API pub use socket::UtpSocket; pub use stream::UtpStream; mod util; mod bit_iterator; mod packet; mod socket; mod stream;
30
0
1
add_only
--- a/src/lib.rs +++ b/src/lib.rs @@ -3,2 +3,32 @@ //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html + +//! # Examples +//! +//! ``` +//! extern crate utp; +//! +//! use utp::UtpStream; +//! use std::io::{Read, Write}; +//! +//! fn main() { +//! // Connect to an hypothetical local server running on port 8080 +//! let addr = "127.0.0.1:8080"; +//! let mut stream = match UtpStream::connect(addr) { +//! Ok(stream) => stream, +//! Err(e) => panic!("{}", e), +//! }; +//! +//! // Send a string +//! match stream.write("Hi there!".as_bytes()) { +//! Ok(_) => (), +//! Err(e) => println!("Write failed with {}", e) +//! } +//! +//! // Close the stream +//! match stream.close() { +//! Ok(()) => println!("Connection closed"), +//! Err(e) => println!("{}", e) +//! } +//! } +//! ```
--- a/src/lib.rs +++ b/src/lib.rs @@ ... @@ //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html + +//! # Examples +//! +//! ``` +//! extern crate utp; +//! +//! use utp::UtpStream; +//! use std::io::{Read, Write}; +//! +//! fn main() { +//! // Connect to an hypothetical local server running on port 8080 +//! let addr = "127.0.0.1:8080"; +//! let mut stream = match UtpStream::connect(addr) { +//! Ok(stream) => stream, +//! Err(e) => panic!("{}", e), +//! }; +//! +//! // Send a string +//! match stream.write("Hi there!".as_bytes()) { +//! Ok(_) => (), +//! Err(e) => println!("Write failed with {}", e) +//! } +//! +//! // Close the stream +//! match stream.close() { +//! Ok(()) => println!("Connection closed"), +//! Err(e) => println!("{}", e) +//! } +//! } +//! ```
--- a/src/lib.rs +++ b/src/lib.rs @@ -3,2 +3,32 @@ CON //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html ADD ADD //! # Examples ADD //! ADD //! ``` ADD //! extern crate utp; ADD //! ADD //! use utp::UtpStream; ADD //! use std::io::{Read, Write}; ADD //! ADD //! fn main() { ADD //! // Connect to an hypothetical local server running on port 8080 ADD //! let addr = "127.0.0.1:8080"; ADD //! let mut stream = match UtpStream::connect(addr) { ADD //! Ok(stream) => stream, ADD //! Err(e) => panic!("{}", e), ADD //! }; ADD //! ADD //! // Send a string ADD //! match stream.write("Hi there!".as_bytes()) { ADD //! Ok(_) => (), ADD //! Err(e) => println!("Write failed with {}", e) ADD //! } ADD //! ADD //! // Close the stream ADD //! match stream.close() { ADD //! Ok(()) => println!("Connection closed"), ADD //! Err(e) => println!("{}", e) ADD //! } ADD //! } ADD //! ``` CON
<<<<<<< SEARCH //! //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html // __________ ____ ____ ======= //! //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html //! # Examples //! //! ``` //! extern crate utp; //! //! use utp::UtpStream; //! use std::io::{Read, Write}; //! //! fn main() { //! // Connect to an hypothetical local server running on port 8080 //! let addr = "127.0.0.1:8080"; //! let mut stream = match UtpStream::connect(addr) { //! Ok(stream) => stream, //! Err(e) => panic!("{}", e), //! }; //! //! // Send a string //! match stream.write("Hi there!".as_bytes()) { //! Ok(_) => (), //! Err(e) => println!("Write failed with {}", e) //! } //! //! // Close the stream //! match stream.close() { //! Ok(()) => println!("Connection closed"), //! Err(e) => println!("{}", e) //! } //! } //! ``` // __________ ____ ____ >>>>>>> REPLACE
orekyuu/intellij-community
5a9210545798d7590ab786fb49f82078b3a9afc6
runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
java
apache-2.0
Upgrade to JUnit 4.0: Fixing Vector -> List update consequences.
package com.intellij.rt.execution.junit2; import junit.framework.TestResult; import junit.framework.TestCase; import junit.framework.TestSuite; import junit.framework.Test; import java.util.Hashtable; import java.util.Enumeration; public class RunOnce extends TestResult { private Hashtable myPeformedTests = new Hashtable(); private static final String NOT_ALLOWED_IN_ID = ":"; protected void run(TestCase test) { if (test.getClass().getName().startsWith(TestSuite.class.getName())) { super.run(test); } else { String testKey = keyOf(test); if (!myPeformedTests.containsKey(testKey)) { super.run(test); myPeformedTests.put(testKey, test); } else { fireTestSkipped(test, (Test)myPeformedTests.get(testKey)); } } } private void fireTestSkipped(TestCase test, Test peformedTest) { for (Enumeration each = fListeners.elements(); each.hasMoreElements();) { Object listener = each.nextElement(); if (listener instanceof TestSkippingListener) ((TestSkippingListener)listener).onTestSkipped(test, peformedTest); } } private String keyOf(TestCase test) { return test.getClass().getName() + NOT_ALLOWED_IN_ID + test.getName() + NOT_ALLOWED_IN_ID + test.toString(); } }
package com.intellij.rt.execution.junit2; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestResult; import junit.framework.TestSuite; import java.util.Hashtable; public class RunOnce extends TestResult { private Hashtable myPeformedTests = new Hashtable(); private static final String NOT_ALLOWED_IN_ID = ":"; protected void run(TestCase test) { if (test.getClass().getName().startsWith(TestSuite.class.getName())) { super.run(test); } else { String testKey = keyOf(test); if (!myPeformedTests.containsKey(testKey)) { super.run(test); myPeformedTests.put(testKey, test); } else { fireTestSkipped(test, (Test)myPeformedTests.get(testKey)); } } } private void fireTestSkipped(TestCase test, Test peformedTest) { for (int i = 0; i < fListeners.size(); i++) { Object listener = fListeners.get(i); if (listener instanceof TestSkippingListener) { ((TestSkippingListener)listener).onTestSkipped(test, peformedTest); } } } private String keyOf(TestCase test) { return test.getClass().getName() + NOT_ALLOWED_IN_ID + test.getName() + NOT_ALLOWED_IN_ID + test.toString(); } }
8
8
3
mixed
--- a/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java +++ b/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java @@ -2,9 +2,8 @@ +import junit.framework.Test; +import junit.framework.TestCase; import junit.framework.TestResult; -import junit.framework.TestCase; import junit.framework.TestSuite; -import junit.framework.Test; import java.util.Hashtable; -import java.util.Enumeration; @@ -29,6 +28,7 @@ private void fireTestSkipped(TestCase test, Test peformedTest) { - for (Enumeration each = fListeners.elements(); each.hasMoreElements();) { - Object listener = each.nextElement(); - if (listener instanceof TestSkippingListener) + for (int i = 0; i < fListeners.size(); i++) { + Object listener = fListeners.get(i); + if (listener instanceof TestSkippingListener) { ((TestSkippingListener)listener).onTestSkipped(test, peformedTest); + } } @@ -38,4 +38,4 @@ return test.getClass().getName() + NOT_ALLOWED_IN_ID + - test.getName() + NOT_ALLOWED_IN_ID + - test.toString(); + test.getName() + NOT_ALLOWED_IN_ID + + test.toString(); }
--- a/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java +++ b/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java @@ ... @@ +import junit.framework.Test; +import junit.framework.TestCase; import junit.framework.TestResult; -import junit.framework.TestCase; import junit.framework.TestSuite; -import junit.framework.Test; import java.util.Hashtable; -import java.util.Enumeration; @@ ... @@ private void fireTestSkipped(TestCase test, Test peformedTest) { - for (Enumeration each = fListeners.elements(); each.hasMoreElements();) { - Object listener = each.nextElement(); - if (listener instanceof TestSkippingListener) + for (int i = 0; i < fListeners.size(); i++) { + Object listener = fListeners.get(i); + if (listener instanceof TestSkippingListener) { ((TestSkippingListener)listener).onTestSkipped(test, peformedTest); + } } @@ ... @@ return test.getClass().getName() + NOT_ALLOWED_IN_ID + - test.getName() + NOT_ALLOWED_IN_ID + - test.toString(); + test.getName() + NOT_ALLOWED_IN_ID + + test.toString(); }
--- a/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java +++ b/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java @@ -2,9 +2,8 @@ CON ADD import junit.framework.Test; ADD import junit.framework.TestCase; CON import junit.framework.TestResult; DEL import junit.framework.TestCase; CON import junit.framework.TestSuite; DEL import junit.framework.Test; CON CON import java.util.Hashtable; DEL import java.util.Enumeration; CON @@ -29,6 +28,7 @@ CON private void fireTestSkipped(TestCase test, Test peformedTest) { DEL for (Enumeration each = fListeners.elements(); each.hasMoreElements();) { DEL Object listener = each.nextElement(); DEL if (listener instanceof TestSkippingListener) ADD for (int i = 0; i < fListeners.size(); i++) { ADD Object listener = fListeners.get(i); ADD if (listener instanceof TestSkippingListener) { CON ((TestSkippingListener)listener).onTestSkipped(test, peformedTest); ADD } CON } @@ -38,4 +38,4 @@ CON return test.getClass().getName() + NOT_ALLOWED_IN_ID + DEL test.getName() + NOT_ALLOWED_IN_ID + DEL test.toString(); ADD test.getName() + NOT_ALLOWED_IN_ID + ADD test.toString(); CON }
<<<<<<< SEARCH package com.intellij.rt.execution.junit2; import junit.framework.TestResult; import junit.framework.TestCase; import junit.framework.TestSuite; import junit.framework.Test; import java.util.Hashtable; import java.util.Enumeration; public class RunOnce extends TestResult { ======= package com.intellij.rt.execution.junit2; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestResult; import junit.framework.TestSuite; import java.util.Hashtable; public class RunOnce extends TestResult { >>>>>>> REPLACE <<<<<<< SEARCH private void fireTestSkipped(TestCase test, Test peformedTest) { for (Enumeration each = fListeners.elements(); each.hasMoreElements();) { Object listener = each.nextElement(); if (listener instanceof TestSkippingListener) ((TestSkippingListener)listener).onTestSkipped(test, peformedTest); } } private String keyOf(TestCase test) { return test.getClass().getName() + NOT_ALLOWED_IN_ID + test.getName() + NOT_ALLOWED_IN_ID + test.toString(); } } ======= private void fireTestSkipped(TestCase test, Test peformedTest) { for (int i = 0; i < fListeners.size(); i++) { Object listener = fListeners.get(i); if (listener instanceof TestSkippingListener) { ((TestSkippingListener)listener).onTestSkipped(test, peformedTest); } } } private String keyOf(TestCase test) { return test.getClass().getName() + NOT_ALLOWED_IN_ID + test.getName() + NOT_ALLOWED_IN_ID + test.toString(); } } >>>>>>> REPLACE
ATLauncher/Discord-Bot
2bd205f87133ac7b4406514964a35d5d0758e02e
src/watchers/TextSpamWatcher.js
javascript
mit
Add in new Kazuto Kirigia spam
import BaseWatcher from './BaseWatcher'; import config from '../config'; /** * This checks for people spamming text stuff. */ class TextSpamWatcher extends BaseWatcher { usesBypassRules = true; /** * The method this watcher should listen on. * * @type {string[]} */ method = [ 'message', 'messageUpdate' ]; async action(method, message, updatedMessage) { let messageToActUpon = message; if (method === 'messageUpdate') { messageToActUpon = updatedMessage; } const rulesChannel = this.bot.channels.find((channel) => (channel.name === config.rules_channel)); const cleanMessage = messageToActUpon.cleanContent.toLowerCase(); if ( cleanMessage.indexOf('this is cooldog') !== -1 || cleanMessage.indexOf('this is memedog') !== -1 || cleanMessage.indexOf('chrisopeer davies') !== -1 || cleanMessage.indexOf('jessica davies') !== -1 || cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 || cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ) { const warningMessage = await messageToActUpon.reply( `Please read the ${rulesChannel} channel. Spamming or encouraging spamming is not allowed.` ); this.addWarningToUser(messageToActUpon); messageToActUpon.delete(); warningMessage.delete(60000); } } } export default TextSpamWatcher;
import BaseWatcher from './BaseWatcher'; import config from '../config'; /** * This checks for people spamming text stuff. */ class TextSpamWatcher extends BaseWatcher { usesBypassRules = true; /** * The method this watcher should listen on. * * @type {string[]} */ method = [ 'message', 'messageUpdate' ]; async action(method, message, updatedMessage) { let messageToActUpon = message; if (method === 'messageUpdate') { messageToActUpon = updatedMessage; } const rulesChannel = this.bot.channels.find((channel) => (channel.name === config.rules_channel)); const cleanMessage = messageToActUpon.cleanContent.toLowerCase(); if ( cleanMessage.indexOf('this is cooldog') !== -1 || cleanMessage.indexOf('this is memedog') !== -1 || cleanMessage.indexOf('chrisopeer davies') !== -1 || cleanMessage.indexOf('jessica davies') !== -1 || cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 || cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 || cleanMessage.indexOf('kazuto kirigia') !== -1 ) { const warningMessage = await messageToActUpon.reply( `Please read the ${rulesChannel} channel. Spamming or encouraging spamming is not allowed.` ); this.addWarningToUser(messageToActUpon); messageToActUpon.delete(); warningMessage.delete(60000); } } } export default TextSpamWatcher;
2
1
1
mixed
--- a/src/watchers/TextSpamWatcher.js +++ b/src/watchers/TextSpamWatcher.js @@ -37,3 +37,4 @@ cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 || - cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 + cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 || + cleanMessage.indexOf('kazuto kirigia') !== -1 ) {
--- a/src/watchers/TextSpamWatcher.js +++ b/src/watchers/TextSpamWatcher.js @@ ... @@ cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 || - cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 + cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 || + cleanMessage.indexOf('kazuto kirigia') !== -1 ) {
--- a/src/watchers/TextSpamWatcher.js +++ b/src/watchers/TextSpamWatcher.js @@ -37,3 +37,4 @@ CON cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 || DEL cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ADD cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 || ADD cleanMessage.indexOf('kazuto kirigia') !== -1 CON ) {
<<<<<<< SEARCH cleanMessage.indexOf('jessica davies') !== -1 || cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 || cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ) { const warningMessage = await messageToActUpon.reply( ======= cleanMessage.indexOf('jessica davies') !== -1 || cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 || cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 || cleanMessage.indexOf('kazuto kirigia') !== -1 ) { const warningMessage = await messageToActUpon.reply( >>>>>>> REPLACE
dshaps10/full-stack-demo-site
5d35cfe5b2655eca9e60c382a13ad092c3e99df4
server/server.js
javascript
mit
Add routes for posting new products and retrieving list of products
// npm dependencies const express = require('express'); const hbs = require('hbs'); // local packages let {mongoose} = require('./db/mongoose'); let {Product} = require('./db/models/products'); // instantiate Express.js const app = express(); // Tell Handlebars where to look for partials hbs.registerPartials(__dirname + '../../views/partials'); // Set Handlebars as default templating engine app.set('view engine', 'hbs'); // Point app towards stylesheets app.use(express.static(__dirname + '/public')); // root route app.get('/', (req, res) => { res.render('home.hbs', { pageTitle: 'Home Page' }); }); // route for e-commerce site app.get('/shop', (req, res) => { res.render('shop.hbs', { pageTitle: 'E-Commerce Shop' }); }); // Specify port and run local server let port = 3000; app.listen(port, () => { console.log(`listening on ${port}`); }); module.exports = { app }
// npm dependencies const express = require('express'); const hbs = require('hbs'); const bodyParser = require('body-parser'); // local packages let {mongoose} = require('./db/mongoose'); let {Product} = require('./models/products'); // instantiate Express.js const app = express(); // Tell Handlebars where to look for partials hbs.registerPartials(__dirname + '../../views/partials'); // Set Handlebars as default templating engine app.set('view engine', 'hbs'); // Point app towards stylesheets app.use(express.static(__dirname + '/public')); // Allows for JSON-formatted POST requests app.use(bodyParser.json()); // root route app.get('/', (req, res) => { res.render('home.hbs', { pageTitle: 'Home Page' }); }); // route for e-commerce site app.get('/shop', (req, res) => { res.render('shop.hbs', { pageTitle: 'E-Commerce Shop' }); }); // API endpoint for seeding product data app.post('/shop/products', (req, res) => { let product = new Product({ title: req.body.title, description: req.body.description, price: req.body.price }); product.save() .then((doc) => { res.send(doc); }, (e) => { res.send('Could not add product'); }); }) app.get('/shop/products', (req, res) => { Product.find() .then((products) => { res.send(products); }, (e) => { rest.send('Could not retrieve products'); }); }); // Specify port and run local server let port = 3000; app.listen(port, () => { console.log(`listening on ${port}`); }); module.exports = { app }
30
1
4
mixed
--- a/server/server.js +++ b/server/server.js @@ -3,2 +3,3 @@ const hbs = require('hbs'); +const bodyParser = require('body-parser'); @@ -6,3 +7,3 @@ let {mongoose} = require('./db/mongoose'); -let {Product} = require('./db/models/products'); +let {Product} = require('./models/products'); @@ -19,2 +20,5 @@ app.use(express.static(__dirname + '/public')); + +// Allows for JSON-formatted POST requests +app.use(bodyParser.json()); @@ -34,2 +38,27 @@ +// API endpoint for seeding product data +app.post('/shop/products', (req, res) => { + let product = new Product({ + title: req.body.title, + description: req.body.description, + price: req.body.price + }); + + product.save() + .then((doc) => { + res.send(doc); + }, (e) => { + res.send('Could not add product'); + }); +}) + +app.get('/shop/products', (req, res) => { + Product.find() + .then((products) => { + res.send(products); + }, (e) => { + rest.send('Could not retrieve products'); + }); +}); + // Specify port and run local server
--- a/server/server.js +++ b/server/server.js @@ ... @@ const hbs = require('hbs'); +const bodyParser = require('body-parser'); @@ ... @@ let {mongoose} = require('./db/mongoose'); -let {Product} = require('./db/models/products'); +let {Product} = require('./models/products'); @@ ... @@ app.use(express.static(__dirname + '/public')); + +// Allows for JSON-formatted POST requests +app.use(bodyParser.json()); @@ ... @@ +// API endpoint for seeding product data +app.post('/shop/products', (req, res) => { + let product = new Product({ + title: req.body.title, + description: req.body.description, + price: req.body.price + }); + + product.save() + .then((doc) => { + res.send(doc); + }, (e) => { + res.send('Could not add product'); + }); +}) + +app.get('/shop/products', (req, res) => { + Product.find() + .then((products) => { + res.send(products); + }, (e) => { + rest.send('Could not retrieve products'); + }); +}); + // Specify port and run local server
--- a/server/server.js +++ b/server/server.js @@ -3,2 +3,3 @@ CON const hbs = require('hbs'); ADD const bodyParser = require('body-parser'); CON @@ -6,3 +7,3 @@ CON let {mongoose} = require('./db/mongoose'); DEL let {Product} = require('./db/models/products'); ADD let {Product} = require('./models/products'); CON @@ -19,2 +20,5 @@ CON app.use(express.static(__dirname + '/public')); ADD ADD // Allows for JSON-formatted POST requests ADD app.use(bodyParser.json()); CON @@ -34,2 +38,27 @@ CON ADD // API endpoint for seeding product data ADD app.post('/shop/products', (req, res) => { ADD let product = new Product({ ADD title: req.body.title, ADD description: req.body.description, ADD price: req.body.price ADD }); ADD ADD product.save() ADD .then((doc) => { ADD res.send(doc); ADD }, (e) => { ADD res.send('Could not add product'); ADD }); ADD }) ADD ADD app.get('/shop/products', (req, res) => { ADD Product.find() ADD .then((products) => { ADD res.send(products); ADD }, (e) => { ADD rest.send('Could not retrieve products'); ADD }); ADD }); ADD CON // Specify port and run local server
<<<<<<< SEARCH const express = require('express'); const hbs = require('hbs'); // local packages let {mongoose} = require('./db/mongoose'); let {Product} = require('./db/models/products'); // instantiate Express.js ======= const express = require('express'); const hbs = require('hbs'); const bodyParser = require('body-parser'); // local packages let {mongoose} = require('./db/mongoose'); let {Product} = require('./models/products'); // instantiate Express.js >>>>>>> REPLACE <<<<<<< SEARCH // Point app towards stylesheets app.use(express.static(__dirname + '/public')); // root route ======= // Point app towards stylesheets app.use(express.static(__dirname + '/public')); // Allows for JSON-formatted POST requests app.use(bodyParser.json()); // root route >>>>>>> REPLACE <<<<<<< SEARCH }); // Specify port and run local server let port = 3000; ======= }); // API endpoint for seeding product data app.post('/shop/products', (req, res) => { let product = new Product({ title: req.body.title, description: req.body.description, price: req.body.price }); product.save() .then((doc) => { res.send(doc); }, (e) => { res.send('Could not add product'); }); }) app.get('/shop/products', (req, res) => { Product.find() .then((products) => { res.send(products); }, (e) => { rest.send('Could not retrieve products'); }); }); // Specify port and run local server let port = 3000; >>>>>>> REPLACE
zensum/franz
4c3b4267c5b60d94a25373aaa97009da81c502b1
src/main/kotlin/engine/mock/MockConsumerBase.kt
kotlin
mit
Update to MockConsumerActorBase to actually implement ConsumerActor (add arg "scope" to function createWorker).
package franz.engine.mock import franz.JobStateException import franz.JobStatus import franz.Message import franz.engine.ConsumerActor import franz.engine.WorkerFunction import kotlinx.coroutines.runBlocking abstract class MockConsumerActorBase<T, U> : ConsumerActor<T, U> { data class Result( val throwable: Throwable?, val status: JobStatus ) private val internalResults: MutableList<Result> = mutableListOf() fun results() = internalResults.toList() protected var handlers = mutableListOf<(Message<T, U>) -> Unit>() override fun start() = Unit override fun stop() = Unit override fun setJobStatus(msg: Message<T, U>, status: JobStatus) { internalResults.add(Result(throwable = null, status = status)) } private fun setException(e: Throwable) { internalResults.add(Result(e, JobStatus.TransientFailure)) } override fun createWorker(fn: WorkerFunction<T, U>) { worker(this, fn) } private inline fun tryJobStatus(fn: () -> JobStatus) = try { fn() } catch(ex: JobStateException){ ex.result } catch (ex: Exception) { JobStatus.TransientFailure } private fun worker(consumer: ConsumerActor<T, U>, fn: WorkerFunction<T, U>) { consumer.subscribe { setJobStatus(it, tryJobStatus { runBlocking{fn(it) } }) } } fun createFactory() = MockConsumerActorFactory(this) }
package franz.engine.mock import franz.JobStateException import franz.JobStatus import franz.Message import franz.engine.ConsumerActor import franz.engine.WorkerFunction import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.runBlocking abstract class MockConsumerActorBase<T, U> : ConsumerActor<T, U> { data class Result( val throwable: Throwable?, val status: JobStatus ) private val internalResults: MutableList<Result> = mutableListOf() fun results() = internalResults.toList() protected var handlers = mutableListOf<(Message<T, U>) -> Unit>() override fun start() = Unit override fun stop() = Unit override fun setJobStatus(msg: Message<T, U>, status: JobStatus) { internalResults.add(Result(throwable = null, status = status)) } private fun setException(e: Throwable) { internalResults.add(Result(e, JobStatus.TransientFailure)) } override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) { worker(this, fn) } private inline fun tryJobStatus(fn: () -> JobStatus) = try { fn() } catch(ex: JobStateException){ ex.result } catch (ex: Exception) { JobStatus.TransientFailure } private fun worker(consumer: ConsumerActor<T, U>, fn: WorkerFunction<T, U>) { consumer.subscribe { setJobStatus(it, tryJobStatus { runBlocking{fn(it) } }) } } fun createFactory() = MockConsumerActorFactory(this) }
2
1
2
mixed
--- a/src/main/kotlin/engine/mock/MockConsumerBase.kt +++ b/src/main/kotlin/engine/mock/MockConsumerBase.kt @@ -7,2 +7,3 @@ import franz.engine.WorkerFunction +import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.runBlocking @@ -32,3 +33,3 @@ - override fun createWorker(fn: WorkerFunction<T, U>) { + override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) { worker(this, fn)
--- a/src/main/kotlin/engine/mock/MockConsumerBase.kt +++ b/src/main/kotlin/engine/mock/MockConsumerBase.kt @@ ... @@ import franz.engine.WorkerFunction +import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.runBlocking @@ ... @@ - override fun createWorker(fn: WorkerFunction<T, U>) { + override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) { worker(this, fn)
--- a/src/main/kotlin/engine/mock/MockConsumerBase.kt +++ b/src/main/kotlin/engine/mock/MockConsumerBase.kt @@ -7,2 +7,3 @@ CON import franz.engine.WorkerFunction ADD import kotlinx.coroutines.CoroutineScope CON import kotlinx.coroutines.runBlocking @@ -32,3 +33,3 @@ CON DEL override fun createWorker(fn: WorkerFunction<T, U>) { ADD override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) { CON worker(this, fn)
<<<<<<< SEARCH import franz.engine.ConsumerActor import franz.engine.WorkerFunction import kotlinx.coroutines.runBlocking ======= import franz.engine.ConsumerActor import franz.engine.WorkerFunction import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.runBlocking >>>>>>> REPLACE <<<<<<< SEARCH } override fun createWorker(fn: WorkerFunction<T, U>) { worker(this, fn) } ======= } override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) { worker(this, fn) } >>>>>>> REPLACE
Wallacoloo/serde_osc
c8449319aad7a52fc5adefa7eaa29074dbe054d3
examples/to_from_vec.rs
rust
apache-2.0
Update example to conform to new message arg behavior
#[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_bytes; extern crate serde_osc; use serde_bytes::ByteBuf; use serde_osc::{de, ser}; /// Struct we'll serialize. /// This represents a single OSC message with three arguments: /// one of type 'i', 'f' and 'b', encoded in the order they appear in the struct. #[derive(Debug, Deserialize, Serialize)] struct Message { address: String, num_channels: i32, rate: f32, // ByteBuf is the object we use for OSC "blobs". // It's a thin wrapper over Vec<u8> provided by Serde that allows // for more computationally-efficient serialization/deserialization. content: ByteBuf, } fn main() { let message = Message { address: "/audio/play".to_owned(), num_channels: 1, rate: 44100.0f32, content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]) }; println!("Serializing {:?}", message); // Serialize the message to an OSC packet stored in a Vec<u8> let as_vec = ser::to_vec(&message).unwrap(); println!("Serialied to: {:?}", as_vec); // Deserialize an OSC packet contained in a Vec<u8> into the Message struct let received: Message = de::from_vec(&as_vec).unwrap(); println!("Received: {:?}", received); }
#[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_bytes; extern crate serde_osc; use serde_bytes::ByteBuf; use serde_osc::{de, ser}; /// Struct we'll serialize. /// This represents a single OSC message with three arguments: /// one of type 'i', 'f' and 'b', encoded in the order they appear in the struct. #[derive(Debug, Deserialize, Serialize)] struct Message { address: String, // ByteBuf is the object we use for OSC "blobs". // It's a thin wrapper over Vec<u8> provided by Serde that allows // for more computationally-efficient serialization/deserialization. args: (i32, f32, ByteBuf), } fn main() { let message = Message { address: "/audio/play".to_owned(), args: ( 1, 44100.0f32, ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]), ) }; println!("Serializing {:?}", message); // Serialize the message to an OSC packet stored in a Vec<u8> let as_vec = ser::to_vec(&message).unwrap(); println!("Serialied to: {:?}", as_vec); // Deserialize an OSC packet contained in a Vec<u8> into the Message struct let received: Message = de::from_vec(&as_vec).unwrap(); println!("Received: {:?}", received); }
7
6
4
mixed
--- a/examples/to_from_vec.rs +++ b/examples/to_from_vec.rs @@ -15,4 +15,2 @@ address: String, - num_channels: i32, - rate: f32, // ByteBuf is the object we use for OSC "blobs". @@ -20,3 +18,3 @@ // for more computationally-efficient serialization/deserialization. - content: ByteBuf, + args: (i32, f32, ByteBuf), } @@ -26,5 +24,7 @@ address: "/audio/play".to_owned(), - num_channels: 1, - rate: 44100.0f32, - content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]) + args: ( + 1, + 44100.0f32, + ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]), + ) }; @@ -40 +40,2 @@ } +
--- a/examples/to_from_vec.rs +++ b/examples/to_from_vec.rs @@ ... @@ address: String, - num_channels: i32, - rate: f32, // ByteBuf is the object we use for OSC "blobs". @@ ... @@ // for more computationally-efficient serialization/deserialization. - content: ByteBuf, + args: (i32, f32, ByteBuf), } @@ ... @@ address: "/audio/play".to_owned(), - num_channels: 1, - rate: 44100.0f32, - content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]) + args: ( + 1, + 44100.0f32, + ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]), + ) }; @@ ... @@ } +
--- a/examples/to_from_vec.rs +++ b/examples/to_from_vec.rs @@ -15,4 +15,2 @@ CON address: String, DEL num_channels: i32, DEL rate: f32, CON // ByteBuf is the object we use for OSC "blobs". @@ -20,3 +18,3 @@ CON // for more computationally-efficient serialization/deserialization. DEL content: ByteBuf, ADD args: (i32, f32, ByteBuf), CON } @@ -26,5 +24,7 @@ CON address: "/audio/play".to_owned(), DEL num_channels: 1, DEL rate: 44100.0f32, DEL content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]) ADD args: ( ADD 1, ADD 44100.0f32, ADD ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]), ADD ) CON }; @@ -40 +40,2 @@ CON } ADD
<<<<<<< SEARCH struct Message { address: String, num_channels: i32, rate: f32, // ByteBuf is the object we use for OSC "blobs". // It's a thin wrapper over Vec<u8> provided by Serde that allows // for more computationally-efficient serialization/deserialization. content: ByteBuf, } fn main() { let message = Message { address: "/audio/play".to_owned(), num_channels: 1, rate: 44100.0f32, content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]) }; println!("Serializing {:?}", message); ======= struct Message { address: String, // ByteBuf is the object we use for OSC "blobs". // It's a thin wrapper over Vec<u8> provided by Serde that allows // for more computationally-efficient serialization/deserialization. args: (i32, f32, ByteBuf), } fn main() { let message = Message { address: "/audio/play".to_owned(), args: ( 1, 44100.0f32, ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]), ) }; println!("Serializing {:?}", message); >>>>>>> REPLACE <<<<<<< SEARCH println!("Received: {:?}", received); } ======= println!("Received: {:?}", received); } >>>>>>> REPLACE
mrjmad/invocations
fc75f5843af70c09e0d63284277bf88689cbb06d
invocations/docs.py
python
bsd-2-clause
Add apidoc to doc building
import os from invoke.tasks import task from invoke.runner import run docs_dir = 'docs' build = os.path.join(docs_dir, '_build') @task def clean_docs(): run("rm -rf %s" % build) @task def browse_docs(): run("open %s" % os.path.join(build, 'index.html')) @task def docs(clean=False, browse=False): if clean: clean_docs.body() run("sphinx-build %s %s" % (docs_dir, build), pty=True) if browse: browse_docs.body()
import os from invoke.tasks import task from invoke.runner import run docs_dir = 'docs' build = os.path.join(docs_dir, '_build') @task def clean_docs(): run("rm -rf %s" % build) @task def browse_docs(): run("open %s" % os.path.join(build, 'index.html')) @task def api_docs(target, output="api", exclude=""): """ Runs ``sphinx-apidoc`` to autogenerate your API docs. Must give target directory/package as ``target``. Results are written out to ``docs/<output>`` (``docs/api`` by default). To exclude certain output files from the final build give ``exclude`` as a comma separated list of file paths. """ output = os.path.join('docs', output) # Have to make these absolute or apidoc is dumb :( exclude = map( lambda x: os.path.abspath(os.path.join(os.getcwd(), x)), exclude.split(',') ) run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude))) @task def docs(clean=False, browse=False, api_target=None, api_output=None, api_exclude=None): """ Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing. Can also build API docs by giving ``api_target`` and optionally ``api_output`` and/or ``api_exclude``. """ if api_target: kwargs = {'target': api_target} if api_output: kwargs['output'] = api_output if api_exclude: kwargs['exclude'] = api_exclude api_docs.body(**kwargs) if clean: clean_docs.body() run("sphinx-build %s %s" % (docs_dir, build), pty=True) if browse: browse_docs.body()
35
1
1
mixed
--- a/invocations/docs.py +++ b/invocations/docs.py @@ -21,3 +21,37 @@ @task -def docs(clean=False, browse=False): +def api_docs(target, output="api", exclude=""): + """ + Runs ``sphinx-apidoc`` to autogenerate your API docs. + + Must give target directory/package as ``target``. Results are written out + to ``docs/<output>`` (``docs/api`` by default). + + To exclude certain output files from the final build give ``exclude`` as a + comma separated list of file paths. + """ + output = os.path.join('docs', output) + # Have to make these absolute or apidoc is dumb :( + exclude = map( + lambda x: os.path.abspath(os.path.join(os.getcwd(), x)), + exclude.split(',') + ) + run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude))) + + +@task +def docs(clean=False, browse=False, api_target=None, api_output=None, + api_exclude=None): + """ + Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing. + + Can also build API docs by giving ``api_target`` and optionally + ``api_output`` and/or ``api_exclude``. + """ + if api_target: + kwargs = {'target': api_target} + if api_output: + kwargs['output'] = api_output + if api_exclude: + kwargs['exclude'] = api_exclude + api_docs.body(**kwargs) if clean:
--- a/invocations/docs.py +++ b/invocations/docs.py @@ ... @@ @task -def docs(clean=False, browse=False): +def api_docs(target, output="api", exclude=""): + """ + Runs ``sphinx-apidoc`` to autogenerate your API docs. + + Must give target directory/package as ``target``. Results are written out + to ``docs/<output>`` (``docs/api`` by default). + + To exclude certain output files from the final build give ``exclude`` as a + comma separated list of file paths. + """ + output = os.path.join('docs', output) + # Have to make these absolute or apidoc is dumb :( + exclude = map( + lambda x: os.path.abspath(os.path.join(os.getcwd(), x)), + exclude.split(',') + ) + run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude))) + + +@task +def docs(clean=False, browse=False, api_target=None, api_output=None, + api_exclude=None): + """ + Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing. + + Can also build API docs by giving ``api_target`` and optionally + ``api_output`` and/or ``api_exclude``. + """ + if api_target: + kwargs = {'target': api_target} + if api_output: + kwargs['output'] = api_output + if api_exclude: + kwargs['exclude'] = api_exclude + api_docs.body(**kwargs) if clean:
--- a/invocations/docs.py +++ b/invocations/docs.py @@ -21,3 +21,37 @@ CON @task DEL def docs(clean=False, browse=False): ADD def api_docs(target, output="api", exclude=""): ADD """ ADD Runs ``sphinx-apidoc`` to autogenerate your API docs. ADD ADD Must give target directory/package as ``target``. Results are written out ADD to ``docs/<output>`` (``docs/api`` by default). ADD ADD To exclude certain output files from the final build give ``exclude`` as a ADD comma separated list of file paths. ADD """ ADD output = os.path.join('docs', output) ADD # Have to make these absolute or apidoc is dumb :( ADD exclude = map( ADD lambda x: os.path.abspath(os.path.join(os.getcwd(), x)), ADD exclude.split(',') ADD ) ADD run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude))) ADD ADD ADD @task ADD def docs(clean=False, browse=False, api_target=None, api_output=None, ADD api_exclude=None): ADD """ ADD Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing. ADD ADD Can also build API docs by giving ``api_target`` and optionally ADD ``api_output`` and/or ``api_exclude``. ADD """ ADD if api_target: ADD kwargs = {'target': api_target} ADD if api_output: ADD kwargs['output'] = api_output ADD if api_exclude: ADD kwargs['exclude'] = api_exclude ADD api_docs.body(**kwargs) CON if clean:
<<<<<<< SEARCH @task def docs(clean=False, browse=False): if clean: clean_docs.body() ======= @task def api_docs(target, output="api", exclude=""): """ Runs ``sphinx-apidoc`` to autogenerate your API docs. Must give target directory/package as ``target``. Results are written out to ``docs/<output>`` (``docs/api`` by default). To exclude certain output files from the final build give ``exclude`` as a comma separated list of file paths. """ output = os.path.join('docs', output) # Have to make these absolute or apidoc is dumb :( exclude = map( lambda x: os.path.abspath(os.path.join(os.getcwd(), x)), exclude.split(',') ) run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude))) @task def docs(clean=False, browse=False, api_target=None, api_output=None, api_exclude=None): """ Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing. Can also build API docs by giving ``api_target`` and optionally ``api_output`` and/or ``api_exclude``. """ if api_target: kwargs = {'target': api_target} if api_output: kwargs['output'] = api_output if api_exclude: kwargs['exclude'] = api_exclude api_docs.body(**kwargs) if clean: clean_docs.body() >>>>>>> REPLACE
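A note on the exclude handling in the api_docs task above: sphinx-apidoc expects absolute exclude paths, so the task resolves each comma-separated entry against the current working directory before joining them onto the command line. A minimal standalone sketch of that path handling follows; the package name mypkg and the two exclude paths are hypothetical, chosen only for illustration.

import os

# Resolve comma-separated relative exclude paths to absolute paths, mirroring
# the map/lambda used in the api_docs task above (hypothetical paths).
exclude = "mypkg/tests,mypkg/_version.py"
absolute_excludes = [
    os.path.abspath(os.path.join(os.getcwd(), path))
    for path in exclude.split(",")
]

# The resulting command has the same shape as the run() call in the record.
print("sphinx-apidoc -o docs/api mypkg " + " ".join(absolute_excludes))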
edelooff/sqlalchemy-json
db6b869eae416e72fa30b1d7271b0ed1d7dc1a55
sqlalchemy_json/__init__.py
python
bsd-2-clause
Fix error when setting JSON value to be `None` Previously this would raise an attribute error as `None` does not have the `coerce` attribute.
from sqlalchemy.ext.mutable import ( Mutable, MutableDict) from sqlalchemy_utils.types.json import JSONType from . track import ( TrackedDict, TrackedList) __all__ = 'MutableJson', 'NestedMutableJson' class NestedMutableDict(TrackedDict, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, dict): return cls(value) return super(cls).coerce(key, value) class NestedMutableList(TrackedList, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, list): return cls(value) return super(cls).coerce(key, value) class NestedMutable(Mutable): """SQLAlchemy `mutable` extension with nested change tracking.""" @classmethod def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if isinstance(value, cls): return value if isinstance(value, dict): return NestedMutableDict.coerce(key, value) if isinstance(value, list): return NestedMutableList.coerce(key, value) return super(cls).coerce(key, value) class MutableJson(JSONType): """JSON type for SQLAlchemy with change tracking at top level.""" class NestedMutableJson(JSONType): """JSON type for SQLAlchemy with nested change tracking.""" MutableDict.associate_with(MutableJson) NestedMutable.associate_with(NestedMutableJson)
from sqlalchemy.ext.mutable import ( Mutable, MutableDict) from sqlalchemy_utils.types.json import JSONType from . track import ( TrackedDict, TrackedList) __all__ = 'MutableJson', 'NestedMutableJson' class NestedMutableDict(TrackedDict, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, dict): return cls(value) return super(cls).coerce(key, value) class NestedMutableList(TrackedList, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, list): return cls(value) return super(cls).coerce(key, value) class NestedMutable(Mutable): """SQLAlchemy `mutable` extension with nested change tracking.""" @classmethod def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if value is None: return value if isinstance(value, cls): return value if isinstance(value, dict): return NestedMutableDict.coerce(key, value) if isinstance(value, list): return NestedMutableList.coerce(key, value) return super(cls).coerce(key, value) class MutableJson(JSONType): """JSON type for SQLAlchemy with change tracking at top level.""" class NestedMutableJson(JSONType): """JSON type for SQLAlchemy with nested change tracking.""" MutableDict.associate_with(MutableJson) NestedMutable.associate_with(NestedMutableJson)
2
0
1
add_only
--- a/sqlalchemy_json/__init__.py +++ b/sqlalchemy_json/__init__.py @@ -37,2 +37,4 @@ """Convert plain dictionary to NestedMutable.""" + if value is None: + return value if isinstance(value, cls):
--- a/sqlalchemy_json/__init__.py +++ b/sqlalchemy_json/__init__.py @@ ... @@ """Convert plain dictionary to NestedMutable.""" + if value is None: + return value if isinstance(value, cls):
--- a/sqlalchemy_json/__init__.py +++ b/sqlalchemy_json/__init__.py @@ -37,2 +37,4 @@ CON """Convert plain dictionary to NestedMutable.""" ADD if value is None: ADD return value CON if isinstance(value, cls):
<<<<<<< SEARCH def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if isinstance(value, cls): return value ======= def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if value is None: return value if isinstance(value, cls): return value >>>>>>> REPLACE
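The fix in the record above is a single early return: NestedMutable.coerce now passes None through before any isinstance dispatch, which is what previously triggered the AttributeError described in the commit message. A minimal sketch of the same guard pattern, detached from SQLAlchemy, is shown below; the NestedValue class is hypothetical and only illustrates the coercion shape.

class NestedValue(dict):
    """Hypothetical stand-in for a mutable, coercible JSON-ish value."""

    @classmethod
    def coerce(cls, key, value):
        if value is None:            # the added guard: None is a legitimate column value
            return value
        if isinstance(value, cls):   # already wrapped
            return value
        if isinstance(value, dict):  # wrap plain dicts
            return cls(value)
        raise ValueError("cannot coerce %r for key %r" % (value, key))

assert NestedValue.coerce("data", None) is None
assert isinstance(NestedValue.coerce("data", {"a": 1}), NestedValue)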
OJFord/tapioca
a03f6af6dd476a8ac19496a4c0d118e16249396e
examples/httpbin.rs
rust
mit
Improve example as a demo
#![feature(associated_consts)] #![feature(use_extern_macros)] #[macro_use] extern crate tapioca; infer_api!(httpbin, "https://raw.githubusercontent.com/OJFord/tapioca/master/tests/schemata/httpbin.yml"); use httpbin::basic_auth__user__hunter_2 as basic_auth; use basic_auth::get::OpAuth::HttpBasic; static USER: &str = "baz"; fn main() { let auth = httpbin::ServerAuth::new(); match httpbin::ip::get(auth) { Ok(response) => match response.body() { httpbin::ip::get::OkBody::Status200(body) => println!("Your IP is {}", body.origin), _ => panic!(), }, _ => println!("Failed to find IP address"), } let query = httpbin::post::post::QueryParams { echo: Some("echo me!".into()), }; match httpbin::post::post(&query, auth) { Ok(response) => match response.body() { httpbin::post::post::OkBody::Status200(_) => assert!(true), _ => panic!(), }, _ => panic!(), } let user_id = basic_auth::ResourceId_user::from_static(USER); let auth = HttpBasic((USER.into(), "hunter2".into()).into()); let response = basic_auth::get(&user_id, auth.into()); println!("Auth response: {:?}", response.body()); }
#![feature(associated_consts)] #![feature(use_extern_macros)] #[macro_use] extern crate tapioca; infer_api!(httpbin, "https://raw.githubusercontent.com/OJFord/tapioca/master/tests/schemata/httpbin.yml"); use httpbin::basic_auth__user__hunter_2 as basic_auth; use basic_auth::get::OpAuth::HttpBasic; static USER: &str = "baz"; fn main() { let auth = httpbin::ServerAuth::new(); match httpbin::ip::get(auth) { Ok(response) => match response.body() { httpbin::ip::get::OkBody::Status200(body) => println!("Your IP is {}", body.origin), _ => println!("httbin.org did something unexpected"), }, Err(_) => println!("httpbin.org errored"), } let user_id = basic_auth::ResourceId_user::from_static(USER); let auth = HttpBasic((USER.into(), "hunter2".into()).into()); match basic_auth::get(&user_id, auth.into()) { Ok(response) => match response.body() { basic_auth::get::OkBody::Status200(body) => if body.authenticated { println!("User '{}' authenticated OK!", body.user) } else { println!("Authentication failed for user '{}'!", body.user) }, _ => println!("httbin.org did something unexpected"), }, Err(_) => println!("httpbin.org errored"), } }
13
15
2
mixed
--- a/examples/httpbin.rs +++ b/examples/httpbin.rs @@ -19,16 +19,5 @@ httpbin::ip::get::OkBody::Status200(body) => println!("Your IP is {}", body.origin), - _ => panic!(), + _ => println!("httbin.org did something unexpected"), }, - _ => println!("Failed to find IP address"), - } - - let query = httpbin::post::post::QueryParams { - echo: Some("echo me!".into()), - }; - match httpbin::post::post(&query, auth) { - Ok(response) => match response.body() { - httpbin::post::post::OkBody::Status200(_) => assert!(true), - _ => panic!(), - }, - _ => panic!(), + Err(_) => println!("httpbin.org errored"), } @@ -37,4 +26,13 @@ let auth = HttpBasic((USER.into(), "hunter2".into()).into()); - let response = basic_auth::get(&user_id, auth.into()); - println!("Auth response: {:?}", response.body()); + match basic_auth::get(&user_id, auth.into()) { + Ok(response) => match response.body() { + basic_auth::get::OkBody::Status200(body) => if body.authenticated { + println!("User '{}' authenticated OK!", body.user) + } else { + println!("Authentication failed for user '{}'!", body.user) + }, + _ => println!("httbin.org did something unexpected"), + }, + Err(_) => println!("httpbin.org errored"), + } }
--- a/examples/httpbin.rs +++ b/examples/httpbin.rs @@ ... @@ httpbin::ip::get::OkBody::Status200(body) => println!("Your IP is {}", body.origin), - _ => panic!(), + _ => println!("httbin.org did something unexpected"), }, - _ => println!("Failed to find IP address"), - } - - let query = httpbin::post::post::QueryParams { - echo: Some("echo me!".into()), - }; - match httpbin::post::post(&query, auth) { - Ok(response) => match response.body() { - httpbin::post::post::OkBody::Status200(_) => assert!(true), - _ => panic!(), - }, - _ => panic!(), + Err(_) => println!("httpbin.org errored"), } @@ ... @@ let auth = HttpBasic((USER.into(), "hunter2".into()).into()); - let response = basic_auth::get(&user_id, auth.into()); - println!("Auth response: {:?}", response.body()); + match basic_auth::get(&user_id, auth.into()) { + Ok(response) => match response.body() { + basic_auth::get::OkBody::Status200(body) => if body.authenticated { + println!("User '{}' authenticated OK!", body.user) + } else { + println!("Authentication failed for user '{}'!", body.user) + }, + _ => println!("httbin.org did something unexpected"), + }, + Err(_) => println!("httpbin.org errored"), + } }
--- a/examples/httpbin.rs +++ b/examples/httpbin.rs @@ -19,16 +19,5 @@ CON httpbin::ip::get::OkBody::Status200(body) => println!("Your IP is {}", body.origin), DEL _ => panic!(), ADD _ => println!("httbin.org did something unexpected"), CON }, DEL _ => println!("Failed to find IP address"), DEL } DEL DEL let query = httpbin::post::post::QueryParams { DEL echo: Some("echo me!".into()), DEL }; DEL match httpbin::post::post(&query, auth) { DEL Ok(response) => match response.body() { DEL httpbin::post::post::OkBody::Status200(_) => assert!(true), DEL _ => panic!(), DEL }, DEL _ => panic!(), ADD Err(_) => println!("httpbin.org errored"), CON } @@ -37,4 +26,13 @@ CON let auth = HttpBasic((USER.into(), "hunter2".into()).into()); DEL let response = basic_auth::get(&user_id, auth.into()); DEL println!("Auth response: {:?}", response.body()); ADD match basic_auth::get(&user_id, auth.into()) { ADD Ok(response) => match response.body() { ADD basic_auth::get::OkBody::Status200(body) => if body.authenticated { ADD println!("User '{}' authenticated OK!", body.user) ADD } else { ADD println!("Authentication failed for user '{}'!", body.user) ADD }, ADD _ => println!("httbin.org did something unexpected"), ADD }, ADD Err(_) => println!("httpbin.org errored"), ADD } CON }
<<<<<<< SEARCH Ok(response) => match response.body() { httpbin::ip::get::OkBody::Status200(body) => println!("Your IP is {}", body.origin), _ => panic!(), }, _ => println!("Failed to find IP address"), } let query = httpbin::post::post::QueryParams { echo: Some("echo me!".into()), }; match httpbin::post::post(&query, auth) { Ok(response) => match response.body() { httpbin::post::post::OkBody::Status200(_) => assert!(true), _ => panic!(), }, _ => panic!(), } let user_id = basic_auth::ResourceId_user::from_static(USER); let auth = HttpBasic((USER.into(), "hunter2".into()).into()); let response = basic_auth::get(&user_id, auth.into()); println!("Auth response: {:?}", response.body()); } ======= Ok(response) => match response.body() { httpbin::ip::get::OkBody::Status200(body) => println!("Your IP is {}", body.origin), _ => println!("httbin.org did something unexpected"), }, Err(_) => println!("httpbin.org errored"), } let user_id = basic_auth::ResourceId_user::from_static(USER); let auth = HttpBasic((USER.into(), "hunter2".into()).into()); match basic_auth::get(&user_id, auth.into()) { Ok(response) => match response.body() { basic_auth::get::OkBody::Status200(body) => if body.authenticated { println!("User '{}' authenticated OK!", body.user) } else { println!("Authentication failed for user '{}'!", body.user) }, _ => println!("httbin.org did something unexpected"), }, Err(_) => println!("httpbin.org errored"), } } >>>>>>> REPLACE
alexrudy/Cauldron
4623c8f0de7ff41f78754df6811570a4d4367728
Cauldron/ext/commandkeywords/__init__.py
python
bsd-3-clause
Make command-keyword compatible with DFW implementation
# -*- coding: utf-8 -*- """ An extension for a command-based keyword. """ from __future__ import absolute_import from Cauldron.types import Boolean, DispatcherKeywordType from Cauldron.exc import NoWriteNecessary class CommandKeyword(Boolean, DispatcherKeywordType): """This keyword will receive boolean writes as 1, and will always be set to 0. Actions can then be performed in callbacks, etc., every time this keyword is triggered. """ KTL_REGISTERED = False KTL_TYPE = 'boolean' def __init__(self, *args, **kwargs): kwargs['initial'] = '0' super(CommandKeyword, self).__init__(*args, **kwargs) def prewrite(self, value): """Before writing, trigger no-write-necssary if value is False""" if self.translate(value) == '0': raise NoWriteNecessary("No write needed, command not triggered.") return super(CommandKeyword, self).prewrite(value) def postwrite(self, value): """Special postwrite that always sets the value to '0'.""" self.set('0', force=True) # We don't have to do anything else here.
# -*- coding: utf-8 -*- """ An extension for a command-based keyword. """ from __future__ import absolute_import from Cauldron.types import Boolean, DispatcherKeywordType from Cauldron.exc import NoWriteNecessary from Cauldron.utils.callbacks import Callbacks class CommandKeyword(Boolean, DispatcherKeywordType): """This keyword will receive boolean writes as 1, and will always be set to 0. Actions can then be performed in callbacks, etc., every time this keyword is triggered. """ KTL_REGISTERED = False KTL_TYPE = 'boolean' def __init__(self, *args, **kwargs): kwargs['initial'] = '0' super(CommandKeyword, self).__init__(*args, **kwargs) self._cbs = Callbacks() def command(self, func): """Add command items.""" self._cbs.add(func) def prewrite(self, value): """Before writing, trigger no-write-necssary if value is False""" if self.translate(value) == '0': raise NoWriteNecessary("No write needed, command not triggered.") return super(CommandKeyword, self).prewrite(value) def write(self, value): """Write to the commands.""" if str(value) == '1': self._cbs(self) def postwrite(self, value): """Special postwrite that always sets the value to '0'.""" self.set('0', force=True) # We don't have to do anything else here.
11
0
3
add_only
--- a/Cauldron/ext/commandkeywords/__init__.py +++ b/Cauldron/ext/commandkeywords/__init__.py @@ -8,2 +8,3 @@ from Cauldron.exc import NoWriteNecessary +from Cauldron.utils.callbacks import Callbacks @@ -22,2 +23,7 @@ super(CommandKeyword, self).__init__(*args, **kwargs) + self._cbs = Callbacks() + + def command(self, func): + """Add command items.""" + self._cbs.add(func) @@ -29,2 +35,7 @@ + def write(self, value): + """Write to the commands.""" + if str(value) == '1': + self._cbs(self) + def postwrite(self, value):
--- a/Cauldron/ext/commandkeywords/__init__.py +++ b/Cauldron/ext/commandkeywords/__init__.py @@ ... @@ from Cauldron.exc import NoWriteNecessary +from Cauldron.utils.callbacks import Callbacks @@ ... @@ super(CommandKeyword, self).__init__(*args, **kwargs) + self._cbs = Callbacks() + + def command(self, func): + """Add command items.""" + self._cbs.add(func) @@ ... @@ + def write(self, value): + """Write to the commands.""" + if str(value) == '1': + self._cbs(self) + def postwrite(self, value):
--- a/Cauldron/ext/commandkeywords/__init__.py +++ b/Cauldron/ext/commandkeywords/__init__.py @@ -8,2 +8,3 @@ CON from Cauldron.exc import NoWriteNecessary ADD from Cauldron.utils.callbacks import Callbacks CON @@ -22,2 +23,7 @@ CON super(CommandKeyword, self).__init__(*args, **kwargs) ADD self._cbs = Callbacks() ADD ADD def command(self, func): ADD """Add command items.""" ADD self._cbs.add(func) CON @@ -29,2 +35,7 @@ CON ADD def write(self, value): ADD """Write to the commands.""" ADD if str(value) == '1': ADD self._cbs(self) ADD CON def postwrite(self, value):
<<<<<<< SEARCH from Cauldron.types import Boolean, DispatcherKeywordType from Cauldron.exc import NoWriteNecessary class CommandKeyword(Boolean, DispatcherKeywordType): ======= from Cauldron.types import Boolean, DispatcherKeywordType from Cauldron.exc import NoWriteNecessary from Cauldron.utils.callbacks import Callbacks class CommandKeyword(Boolean, DispatcherKeywordType): >>>>>>> REPLACE <<<<<<< SEARCH kwargs['initial'] = '0' super(CommandKeyword, self).__init__(*args, **kwargs) def prewrite(self, value): ======= kwargs['initial'] = '0' super(CommandKeyword, self).__init__(*args, **kwargs) self._cbs = Callbacks() def command(self, func): """Add command items.""" self._cbs.add(func) def prewrite(self, value): >>>>>>> REPLACE <<<<<<< SEARCH return super(CommandKeyword, self).prewrite(value) def postwrite(self, value): """Special postwrite that always sets the value to '0'.""" ======= return super(CommandKeyword, self).prewrite(value) def write(self, value): """Write to the commands.""" if str(value) == '1': self._cbs(self) def postwrite(self, value): """Special postwrite that always sets the value to '0'.""" >>>>>>> REPLACE
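The callback pattern added in the Cauldron record above (callables registered on a keyword and fired whenever a boolean '1' is written, after which the keyword resets itself to '0') can be sketched without any Cauldron dependencies. The class below is a hypothetical stand-in for illustration only, not Cauldron's actual CommandKeyword or Callbacks API.

# Hypothetical, dependency-free sketch of the command-callback pattern from
# the Cauldron record above; class and method names are stand-ins.
class CommandKeywordSketch(object):
    def __init__(self):
        self._callbacks = []
        self.value = '0'

    def command(self, func):
        """Register a callable to fire whenever the command is triggered."""
        self._callbacks.append(func)

    def write(self, value):
        """A write of '1' fires every registered callback; the keyword then
        resets itself to '0', mirroring the postwrite in the real keyword."""
        if str(value) == '1':
            for func in self._callbacks:
                func(self)
        self.value = '0'

def announce(keyword):
    print("command triggered")

kw = CommandKeywordSketch()
kw.command(announce)
kw.write('1')   # prints "command triggered"; kw.value remains '0'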
Doist/TodoistPojos
bc9e1ff1d8f31cebe6a4820572482dc318427bd4
src/main/java/com/todoist/pojo/Workspace.kt
kotlin
mit
Remove members from workspace model
package com.todoist.pojo open class Workspace( id: Long, open var name: String, open var description: String?, open var logoBig: String?, open var logoMedium: String?, open var logoSmall: String?, open var logoS640: String?, open var isInviteOnlyDefault: Boolean, open var defaultCollaboratorRole: Collaborator.Role, open var createdAt: Long, open var isCollapsed: Boolean, open var members: List<WorkspaceMember>?, isDeleted: Boolean, ) : Model(id, isDeleted) { sealed class Role(protected open val key: String) { object Admin : Role("ADMIN") object Member : Role("MEMBER") object Guest : Role("GUEST") data class Unknown(override val key: String) : Role(key) object Invalid : Role("INVALID") override fun toString() = key companion object { fun get(typeKey: String?): Role { val upperCasedKey = typeKey?.uppercase() return when { upperCasedKey.isNullOrEmpty() -> Invalid upperCasedKey == Admin.key -> Admin upperCasedKey == Member.key -> Member upperCasedKey == Guest.key -> Guest else -> Unknown(typeKey) } } } } companion object { fun sanitizeName(name: String): String = Sanitizers.WORKSPACE_NAME_INVALID_PATTERN.matcher(name.trim()) .replaceAll(Sanitizers.REPLACEMENT) } }
package com.todoist.pojo open class Workspace( id: Long, open var name: String, open var description: String?, open var logoBig: String?, open var logoMedium: String?, open var logoSmall: String?, open var logoS640: String?, open var isInviteOnlyDefault: Boolean, open var defaultCollaboratorRole: Collaborator.Role, open var createdAt: Long, open var isCollapsed: Boolean, isDeleted: Boolean, ) : Model(id, isDeleted) { sealed class Role(protected open val key: String) { object Admin : Role("ADMIN") object Member : Role("MEMBER") object Guest : Role("GUEST") data class Unknown(override val key: String) : Role(key) object Invalid : Role("INVALID") override fun toString() = key companion object { fun get(typeKey: String?): Role { val upperCasedKey = typeKey?.uppercase() return when { upperCasedKey.isNullOrEmpty() -> Invalid upperCasedKey == Admin.key -> Admin upperCasedKey == Member.key -> Member upperCasedKey == Guest.key -> Guest else -> Unknown(typeKey) } } } } companion object { fun sanitizeName(name: String): String = Sanitizers.WORKSPACE_NAME_INVALID_PATTERN.matcher(name.trim()) .replaceAll(Sanitizers.REPLACEMENT) } }
0
1
1
del_only
--- a/src/main/java/com/todoist/pojo/Workspace.kt +++ b/src/main/java/com/todoist/pojo/Workspace.kt @@ -14,3 +14,2 @@ open var isCollapsed: Boolean, - open var members: List<WorkspaceMember>?, isDeleted: Boolean,
--- a/src/main/java/com/todoist/pojo/Workspace.kt +++ b/src/main/java/com/todoist/pojo/Workspace.kt @@ ... @@ open var isCollapsed: Boolean, - open var members: List<WorkspaceMember>?, isDeleted: Boolean,
--- a/src/main/java/com/todoist/pojo/Workspace.kt +++ b/src/main/java/com/todoist/pojo/Workspace.kt @@ -14,3 +14,2 @@ CON open var isCollapsed: Boolean, DEL open var members: List<WorkspaceMember>?, CON isDeleted: Boolean,
<<<<<<< SEARCH open var createdAt: Long, open var isCollapsed: Boolean, open var members: List<WorkspaceMember>?, isDeleted: Boolean, ) : Model(id, isDeleted) { ======= open var createdAt: Long, open var isCollapsed: Boolean, isDeleted: Boolean, ) : Model(id, isDeleted) { >>>>>>> REPLACE
jonschlinkert/add-banner
2d47d3a2c16ca1a855953ac0e1c91bf8cace6c1a
test.js
javascript
mit
Add backwards compatibility for Node.js 0.10
var banner = require('./'); let chai = require('chai'); let expect = chai.expect; describe('banner', () => { const FILEPATH = 'test-target.js'; context('without options (using defaults)', () => { let expectation = `/*! * add-banner <https://github.com/jonschlinkert/add-banner> * * Copyright (c) 2018 Jon Schlinkert, contributors. * Licensed under the MIT license. */ `; it('expected to populate banner', () => { expect(banner(FILEPATH)).to.eql(expectation); }); }); context('with specific options', () => { let options = { name: 'addbanner', author: 'Jon Schlinkert (https://github.com/jonschlinkert)', homepage: 'https://github.com/jonschlinkert/add-banner', banner: 'banner.tmpl', year: '2017', license: 'GPL-3' }; let expectation = `/*! * addbanner <https://github.com/jonschlinkert/add-banner> * * Copyright (c) 2017 Jon Schlinkert, contributors. * Licensed under the GPL-3 license. */ `; it('expected to populate banner', () => { expect(banner(FILEPATH, options)).to.eql(expectation); }); }); });
var banner = require('./'); let chai = require('chai'); let expect = chai.expect; describe('banner', () => { let filepath = 'test-target.js'; context('without options (using defaults)', () => { let expectation = `/*! * add-banner <https://github.com/jonschlinkert/add-banner> * * Copyright (c) 2018 Jon Schlinkert, contributors. * Licensed under the MIT license. */ `; it('expected to populate banner', () => { expect(banner(filepath)).to.eql(expectation); }); }); context('with specific options', () => { let options = { name: 'addbanner', author: 'Jon Schlinkert (https://github.com/jonschlinkert)', homepage: 'https://github.com/jonschlinkert/add-banner', banner: 'banner.tmpl', year: '2017', license: 'GPL-3' }; let expectation = `/*! * addbanner <https://github.com/jonschlinkert/add-banner> * * Copyright (c) 2017 Jon Schlinkert, contributors. * Licensed under the GPL-3 license. */ `; it('expected to populate banner', () => { expect(banner(filepath, options)).to.eql(expectation); }); }); });
3
3
3
mixed
--- a/test.js +++ b/test.js @@ -6,3 +6,3 @@ - const FILEPATH = 'test-target.js'; + let filepath = 'test-target.js'; @@ -18,3 +18,3 @@ it('expected to populate banner', () => { - expect(banner(FILEPATH)).to.eql(expectation); + expect(banner(filepath)).to.eql(expectation); }); @@ -43,3 +43,3 @@ it('expected to populate banner', () => { - expect(banner(FILEPATH, options)).to.eql(expectation); + expect(banner(filepath, options)).to.eql(expectation); });
--- a/test.js +++ b/test.js @@ ... @@ - const FILEPATH = 'test-target.js'; + let filepath = 'test-target.js'; @@ ... @@ it('expected to populate banner', () => { - expect(banner(FILEPATH)).to.eql(expectation); + expect(banner(filepath)).to.eql(expectation); }); @@ ... @@ it('expected to populate banner', () => { - expect(banner(FILEPATH, options)).to.eql(expectation); + expect(banner(filepath, options)).to.eql(expectation); });
--- a/test.js +++ b/test.js @@ -6,3 +6,3 @@ CON DEL const FILEPATH = 'test-target.js'; ADD let filepath = 'test-target.js'; CON @@ -18,3 +18,3 @@ CON it('expected to populate banner', () => { DEL expect(banner(FILEPATH)).to.eql(expectation); ADD expect(banner(filepath)).to.eql(expectation); CON }); @@ -43,3 +43,3 @@ CON it('expected to populate banner', () => { DEL expect(banner(FILEPATH, options)).to.eql(expectation); ADD expect(banner(filepath, options)).to.eql(expectation); CON });
<<<<<<< SEARCH describe('banner', () => { const FILEPATH = 'test-target.js'; context('without options (using defaults)', () => { ======= describe('banner', () => { let filepath = 'test-target.js'; context('without options (using defaults)', () => { >>>>>>> REPLACE <<<<<<< SEARCH `; it('expected to populate banner', () => { expect(banner(FILEPATH)).to.eql(expectation); }); }); ======= `; it('expected to populate banner', () => { expect(banner(filepath)).to.eql(expectation); }); }); >>>>>>> REPLACE <<<<<<< SEARCH it('expected to populate banner', () => { expect(banner(FILEPATH, options)).to.eql(expectation); }); }); ======= it('expected to populate banner', () => { expect(banner(filepath, options)).to.eql(expectation); }); }); >>>>>>> REPLACE
pgollakota/django-chartit
b974bbcc7e243fca7c3dc63fbbaf530fe9b69e50
runtests.py
python
bsd-2-clause
Load DB migrations before testing and use verbose=2 and failfast Note that we use `manage.py test` instead of `manage.py migrate` and manually running the tests. This lets Django take care of applying migrations before running tests. This works around https://code.djangoproject.com/ticket/22487 which causes a test failure on Django 1.8.14. In 1.8.14 somehow we end up without any actual data in the test DB and one of the tests fails if we use `manage.py migrate` and run the tests manually via TestRunner.
import sys try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:", } }, INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.staticfiles", "django.contrib.sites", "chartit", "chartit_tests", ], SITE_ID=1, MIDDLEWARE_CLASSES=(), STATIC_URL='/static/' ) try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] # Run tests TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == "__main__": run_tests(*sys.argv[1:])
import os import sys try: sys.path.append('demoproject') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") from django.conf import settings from django.core.management import call_command settings.DATABASES['default']['NAME'] = ':memory:' settings.INSTALLED_APPS.append('chartit_tests') try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] # ./manage.py test takes care of database creation and # application of migrations if any result = call_command('test', *test_args, verbosity=2, failfast=True) sys.exit(result) if __name__ == "__main__": run_tests(*sys.argv[1:])
11
30
3
mixed
--- a/runtests.py +++ b/runtests.py @@ -1 +1,2 @@ +import os import sys @@ -3,26 +4,10 @@ try: + sys.path.append('demoproject') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") + from django.conf import settings - from django.test.utils import get_runner + from django.core.management import call_command - settings.configure( - DEBUG=True, - USE_TZ=True, - DATABASES={ - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": ":memory:", - } - }, - INSTALLED_APPS=[ - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.staticfiles", - "django.contrib.sites", - "chartit", - "chartit_tests", - ], - SITE_ID=1, - MIDDLEWARE_CLASSES=(), - STATIC_URL='/static/' - ) + settings.DATABASES['default']['NAME'] = ':memory:' + settings.INSTALLED_APPS.append('chartit_tests') @@ -46,10 +31,6 @@ - # Run tests - TestRunner = get_runner(settings) - test_runner = TestRunner() - - failures = test_runner.run_tests(test_args) - - if failures: - sys.exit(bool(failures)) + # ./manage.py test takes care of database creation and + # application of migrations if any + result = call_command('test', *test_args, verbosity=2, failfast=True) + sys.exit(result)
--- a/runtests.py +++ b/runtests.py @@ ... @@ +import os import sys @@ ... @@ try: + sys.path.append('demoproject') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") + from django.conf import settings - from django.test.utils import get_runner + from django.core.management import call_command - settings.configure( - DEBUG=True, - USE_TZ=True, - DATABASES={ - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": ":memory:", - } - }, - INSTALLED_APPS=[ - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.staticfiles", - "django.contrib.sites", - "chartit", - "chartit_tests", - ], - SITE_ID=1, - MIDDLEWARE_CLASSES=(), - STATIC_URL='/static/' - ) + settings.DATABASES['default']['NAME'] = ':memory:' + settings.INSTALLED_APPS.append('chartit_tests') @@ ... @@ - # Run tests - TestRunner = get_runner(settings) - test_runner = TestRunner() - - failures = test_runner.run_tests(test_args) - - if failures: - sys.exit(bool(failures)) + # ./manage.py test takes care of database creation and + # application of migrations if any + result = call_command('test', *test_args, verbosity=2, failfast=True) + sys.exit(result)
--- a/runtests.py +++ b/runtests.py @@ -1 +1,2 @@ ADD import os CON import sys @@ -3,26 +4,10 @@ CON try: ADD sys.path.append('demoproject') ADD os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") ADD CON from django.conf import settings DEL from django.test.utils import get_runner ADD from django.core.management import call_command CON DEL settings.configure( DEL DEBUG=True, DEL USE_TZ=True, DEL DATABASES={ DEL "default": { DEL "ENGINE": "django.db.backends.sqlite3", DEL "NAME": ":memory:", DEL } DEL }, DEL INSTALLED_APPS=[ DEL "django.contrib.auth", DEL "django.contrib.contenttypes", DEL "django.contrib.staticfiles", DEL "django.contrib.sites", DEL "chartit", DEL "chartit_tests", DEL ], DEL SITE_ID=1, DEL MIDDLEWARE_CLASSES=(), DEL STATIC_URL='/static/' DEL ) ADD settings.DATABASES['default']['NAME'] = ':memory:' ADD settings.INSTALLED_APPS.append('chartit_tests') CON @@ -46,10 +31,6 @@ CON DEL # Run tests DEL TestRunner = get_runner(settings) DEL test_runner = TestRunner() DEL DEL failures = test_runner.run_tests(test_args) DEL DEL if failures: DEL sys.exit(bool(failures)) ADD # ./manage.py test takes care of database creation and ADD # application of migrations if any ADD result = call_command('test', *test_args, verbosity=2, failfast=True) ADD sys.exit(result) CON
<<<<<<< SEARCH import sys try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:", } }, INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.staticfiles", "django.contrib.sites", "chartit", "chartit_tests", ], SITE_ID=1, MIDDLEWARE_CLASSES=(), STATIC_URL='/static/' ) try: ======= import os import sys try: sys.path.append('demoproject') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") from django.conf import settings from django.core.management import call_command settings.DATABASES['default']['NAME'] = ':memory:' settings.INSTALLED_APPS.append('chartit_tests') try: >>>>>>> REPLACE <<<<<<< SEARCH test_args = ["chartit_tests"] # Run tests TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) ======= test_args = ["chartit_tests"] # ./manage.py test takes care of database creation and # application of migrations if any result = call_command('test', *test_args, verbosity=2, failfast=True) sys.exit(result) >>>>>>> REPLACE
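The call_command('test', ...) line in the record above mirrors an ordinary command-line invocation of Django's test command. The sketch below shows that rough equivalence; the flag names are Django's standard test options, while the assumption that manage.py lives in the demoproject directory set up by runtests.py is mine.

# Rough CLI equivalent of call_command('test', 'chartit_tests',
# verbosity=2, failfast=True); the demoproject working directory is assumed.
import subprocess

subprocess.call(
    ["python", "manage.py", "test", "chartit_tests",
     "--verbosity", "2", "--failfast"],
    cwd="demoproject",
)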
pluggerjs/plugger
3085397f29285a2a0b2f8e08cd839ed7a2278b26
lib/core/pmodule.js
javascript
mit
Add automatic pjs module search
const pp = require('./ppublish'); const pt = require('./ptalk'); const utils = require('../common/utils'); const fs = require('fs'); const commander = require('commander'); const path = require('path'); commander .option('-m --module <file>', 'specify a pjs file') .option('-s --server <String>', 'specify the server address') .parse(process.argv); if (commander.module && commander.server) { console.log(commander.module, commander.server) fs.readFile(commander.module, function (err, data) { if (err) throw err; utils.rndPort().then((port) => { var addr = "tcp://127.0.0.1:" + port; data = JSON.parse(data); data.port = addr; pp.announce(data, commander.server); const dirname = path.dirname(commander.module); const imported = require(path.resolve(path.join(dirname, data.name))); pt(addr, function (name, params) { return imported[name](params); }); }).catch((err) => { console.log(err) }); }); }
const pp = require('./ppublish'); const pt = require('./ptalk'); const utils = require('../common/utils'); const fs = require('fs'); const commander = require('commander'); const path = require('path'); const { exec } = require('child_process'); commander .option('-m --module <file>', 'specify a pjs file') .option('-s --server <String>', 'specify the server address') .parse(process.argv); function findFiles(folder, extension, cb){ folder = path.resolve(folder); var command = ""; if(/^win/.test(process.platform)){ command = `dir /B ${folder}\\*."${extension}`; } else{ command = `ls -1 ${folder}/*.${extension}`; } exec(command, (err, stdout, stderr) => { if(err) return cb(err, null); //get rid of \r from windows stdout = stdout.replace(/\r/g,""); var files = stdout.split("\n"); //remove last entry because it is empty files.splice(-1, 1); cb(err, files); }); } if (commander.module && commander.server) { findFiles(commander.module, 'pjs', (err, files) => { fs.readFile(files[0], (err, data) => { if (err) throw err; utils.rndPort().then((port) => { var addr = "tcp://127.0.0.1:" + port; data = JSON.parse(data); data.port = addr; pp.announce(data, commander.server); const imported = require(path.resolve(path.join(commander.module, data.name))); pt(addr, function (name, params) { return imported[name](params); }); }).catch((err) => { console.log(err) }); }); }); }
38
16
2
mixed
--- a/lib/core/pmodule.js +++ b/lib/core/pmodule.js @@ -6,2 +6,3 @@ const path = require('path'); +const { exec } = require('child_process'); @@ -12,24 +13,45 @@ +function findFiles(folder, extension, cb){ + folder = path.resolve(folder); + + var command = ""; + + if(/^win/.test(process.platform)){ + command = `dir /B ${folder}\\*."${extension}`; + } else{ + command = `ls -1 ${folder}/*.${extension}`; + } + + exec(command, (err, stdout, stderr) => { + if(err) return cb(err, null); + //get rid of \r from windows + stdout = stdout.replace(/\r/g,""); + var files = stdout.split("\n"); + //remove last entry because it is empty + files.splice(-1, 1); + cb(err, files); + }); +} + if (commander.module && commander.server) { - console.log(commander.module, commander.server) - fs.readFile(commander.module, function (err, data) { - if (err) throw err; + findFiles(commander.module, 'pjs', (err, files) => { + fs.readFile(files[0], (err, data) => { + if (err) throw err; - utils.rndPort().then((port) => { - var addr = "tcp://127.0.0.1:" + port; - data = JSON.parse(data); - data.port = addr; + utils.rndPort().then((port) => { + var addr = "tcp://127.0.0.1:" + port; + data = JSON.parse(data); + data.port = addr; - pp.announce(data, commander.server); + pp.announce(data, commander.server); - const dirname = path.dirname(commander.module); + const imported = require(path.resolve(path.join(commander.module, data.name))); + + pt(addr, function (name, params) { + return imported[name](params); + }); - const imported = require(path.resolve(path.join(dirname, data.name))); - - pt(addr, function (name, params) { - return imported[name](params); + }).catch((err) => { + console.log(err) }); - - }).catch((err) => { - console.log(err) });
--- a/lib/core/pmodule.js +++ b/lib/core/pmodule.js @@ ... @@ const path = require('path'); +const { exec } = require('child_process'); @@ ... @@ +function findFiles(folder, extension, cb){ + folder = path.resolve(folder); + + var command = ""; + + if(/^win/.test(process.platform)){ + command = `dir /B ${folder}\\*."${extension}`; + } else{ + command = `ls -1 ${folder}/*.${extension}`; + } + + exec(command, (err, stdout, stderr) => { + if(err) return cb(err, null); + //get rid of \r from windows + stdout = stdout.replace(/\r/g,""); + var files = stdout.split("\n"); + //remove last entry because it is empty + files.splice(-1, 1); + cb(err, files); + }); +} + if (commander.module && commander.server) { - console.log(commander.module, commander.server) - fs.readFile(commander.module, function (err, data) { - if (err) throw err; + findFiles(commander.module, 'pjs', (err, files) => { + fs.readFile(files[0], (err, data) => { + if (err) throw err; - utils.rndPort().then((port) => { - var addr = "tcp://127.0.0.1:" + port; - data = JSON.parse(data); - data.port = addr; + utils.rndPort().then((port) => { + var addr = "tcp://127.0.0.1:" + port; + data = JSON.parse(data); + data.port = addr; - pp.announce(data, commander.server); + pp.announce(data, commander.server); - const dirname = path.dirname(commander.module); + const imported = require(path.resolve(path.join(commander.module, data.name))); + + pt(addr, function (name, params) { + return imported[name](params); + }); - const imported = require(path.resolve(path.join(dirname, data.name))); - - pt(addr, function (name, params) { - return imported[name](params); + }).catch((err) => { + console.log(err) }); - - }).catch((err) => { - console.log(err) });
--- a/lib/core/pmodule.js +++ b/lib/core/pmodule.js @@ -6,2 +6,3 @@ CON const path = require('path'); ADD const { exec } = require('child_process'); CON @@ -12,24 +13,45 @@ CON ADD function findFiles(folder, extension, cb){ ADD folder = path.resolve(folder); ADD ADD var command = ""; ADD ADD if(/^win/.test(process.platform)){ ADD command = `dir /B ${folder}\\*."${extension}`; ADD } else{ ADD command = `ls -1 ${folder}/*.${extension}`; ADD } ADD ADD exec(command, (err, stdout, stderr) => { ADD if(err) return cb(err, null); ADD //get rid of \r from windows ADD stdout = stdout.replace(/\r/g,""); ADD var files = stdout.split("\n"); ADD //remove last entry because it is empty ADD files.splice(-1, 1); ADD cb(err, files); ADD }); ADD } ADD CON if (commander.module && commander.server) { DEL console.log(commander.module, commander.server) DEL fs.readFile(commander.module, function (err, data) { DEL if (err) throw err; ADD findFiles(commander.module, 'pjs', (err, files) => { ADD fs.readFile(files[0], (err, data) => { ADD if (err) throw err; CON DEL utils.rndPort().then((port) => { DEL var addr = "tcp://127.0.0.1:" + port; DEL data = JSON.parse(data); DEL data.port = addr; ADD utils.rndPort().then((port) => { ADD var addr = "tcp://127.0.0.1:" + port; ADD data = JSON.parse(data); ADD data.port = addr; CON DEL pp.announce(data, commander.server); ADD pp.announce(data, commander.server); CON DEL const dirname = path.dirname(commander.module); ADD const imported = require(path.resolve(path.join(commander.module, data.name))); ADD ADD pt(addr, function (name, params) { ADD return imported[name](params); ADD }); CON DEL const imported = require(path.resolve(path.join(dirname, data.name))); DEL DEL pt(addr, function (name, params) { DEL return imported[name](params); ADD }).catch((err) => { ADD console.log(err) CON }); DEL DEL }).catch((err) => { DEL console.log(err) CON });
<<<<<<< SEARCH const commander = require('commander'); const path = require('path'); commander ======= const commander = require('commander'); const path = require('path'); const { exec } = require('child_process'); commander >>>>>>> REPLACE <<<<<<< SEARCH .parse(process.argv); if (commander.module && commander.server) { console.log(commander.module, commander.server) fs.readFile(commander.module, function (err, data) { if (err) throw err; utils.rndPort().then((port) => { var addr = "tcp://127.0.0.1:" + port; data = JSON.parse(data); data.port = addr; pp.announce(data, commander.server); const dirname = path.dirname(commander.module); const imported = require(path.resolve(path.join(dirname, data.name))); pt(addr, function (name, params) { return imported[name](params); }); }).catch((err) => { console.log(err) }); }); ======= .parse(process.argv); function findFiles(folder, extension, cb){ folder = path.resolve(folder); var command = ""; if(/^win/.test(process.platform)){ command = `dir /B ${folder}\\*."${extension}`; } else{ command = `ls -1 ${folder}/*.${extension}`; } exec(command, (err, stdout, stderr) => { if(err) return cb(err, null); //get rid of \r from windows stdout = stdout.replace(/\r/g,""); var files = stdout.split("\n"); //remove last entry because it is empty files.splice(-1, 1); cb(err, files); }); } if (commander.module && commander.server) { findFiles(commander.module, 'pjs', (err, files) => { fs.readFile(files[0], (err, data) => { if (err) throw err; utils.rndPort().then((port) => { var addr = "tcp://127.0.0.1:" + port; data = JSON.parse(data); data.port = addr; pp.announce(data, commander.server); const imported = require(path.resolve(path.join(commander.module, data.name))); pt(addr, function (name, params) { return imported[name](params); }); }).catch((err) => { console.log(err) }); }); }); >>>>>>> REPLACE
iandeboisblanc/evolution
73bc382819f98c30d6e4a77e0f8bc75ce18dc2c9
server/runEves.js
javascript
cc0-1.0
Add derived eves to Eves array
import settings from './helpers/settings' import {findDistance, limitPositions, chooseOne, randomInt, getAvgPostion} from './helpers/general' import {applyLimbForces, updateBodyPartPositions} from './helpers/movement' import {createEveData, deriveEveData} from './helpers/eveCreation' import {killEve, collectStats, saveStateToDB} from './helpers/lifeCycle' //Create initial data: var Eves = []; for(var i = 0; i < settings.eveCount; i ++) { Eves.push(createEveData()); } //Animate: setInterval(() => { applyLimbForces(Eves); updateBodyPartPositions(Eves); // console.log('YO!',chooseOne.valueOf()) }, settings.stepTime) //Selective Pressure: setInterval(() => { killEve(Eves); var eve = chooseOne(Eves); deriveEveData(eve); }, settings.killTime) // //Check progress // setInterval(() => { // console.log('Eve 1, limb1:', Eves[0].limbs[0]) // }, 1000) // setTimeout(animate, 1000); // setInterval(collectStats, 10000); // setInterval(function() { // killEves(); // Eves.push(deriveEveData(chooseOne(Eves))); // generateEves(); // }, 10000); //setInterval(saveStateToDB, 10000)
import settings from './helpers/settings' import {findDistance, limitPositions, chooseOne, randomInt, getAvgPostion} from './helpers/general' import {applyLimbForces, updateBodyPartPositions} from './helpers/movement' import {createEveData, deriveEveData} from './helpers/eveCreation' import {killEve, collectStats, saveStateToDB} from './helpers/lifeCycle' //Create initial data: var Eves = []; for(var i = 0; i < settings.eveCount; i ++) { Eves.push(createEveData()); } //Animate: setInterval(() => { applyLimbForces(Eves); updateBodyPartPositions(Eves); }, settings.stepTime) //Selective Pressure: setInterval(() => { killEve(Eves); var eve = chooseOne(Eves); Eves.push(deriveEveData(eve)); }, settings.killTime) // //Check progress // setInterval(() => { // console.log('Eve 1, limb1:', Eves[0].limbs[0]) // }, 1000) // setTimeout(animate, 1000); // setInterval(collectStats, 10000); // setInterval(function() { // killEves(); // Eves.push(deriveEveData(chooseOne(Eves))); // generateEves(); // }, 10000); //setInterval(saveStateToDB, 10000)
1
2
2
mixed
--- a/server/runEves.js +++ b/server/runEves.js @@ -16,3 +16,2 @@ updateBodyPartPositions(Eves); - // console.log('YO!',chooseOne.valueOf()) }, settings.stepTime) @@ -23,3 +22,3 @@ var eve = chooseOne(Eves); - deriveEveData(eve); + Eves.push(deriveEveData(eve)); }, settings.killTime)
--- a/server/runEves.js +++ b/server/runEves.js @@ ... @@ updateBodyPartPositions(Eves); - // console.log('YO!',chooseOne.valueOf()) }, settings.stepTime) @@ ... @@ var eve = chooseOne(Eves); - deriveEveData(eve); + Eves.push(deriveEveData(eve)); }, settings.killTime)
--- a/server/runEves.js +++ b/server/runEves.js @@ -16,3 +16,2 @@ CON updateBodyPartPositions(Eves); DEL // console.log('YO!',chooseOne.valueOf()) CON }, settings.stepTime) @@ -23,3 +22,3 @@ CON var eve = chooseOne(Eves); DEL deriveEveData(eve); ADD Eves.push(deriveEveData(eve)); CON }, settings.killTime)
<<<<<<< SEARCH applyLimbForces(Eves); updateBodyPartPositions(Eves); // console.log('YO!',chooseOne.valueOf()) }, settings.stepTime) ======= applyLimbForces(Eves); updateBodyPartPositions(Eves); }, settings.stepTime) >>>>>>> REPLACE <<<<<<< SEARCH killEve(Eves); var eve = chooseOne(Eves); deriveEveData(eve); }, settings.killTime) ======= killEve(Eves); var eve = chooseOne(Eves); Eves.push(deriveEveData(eve)); }, settings.killTime) >>>>>>> REPLACE
quarkusio/quarkus
a33daeae19c81846bf61b817d7a2f76297792d47
extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusJtaPlatform.java
java
apache-2.0
Reimplement the QuarkusJTAPlatform to avoid leaking the ORM registry
package io.quarkus.hibernate.orm.runtime.customized; import javax.transaction.TransactionManager; import javax.transaction.UserTransaction; import org.hibernate.engine.transaction.jta.platform.internal.AbstractJtaPlatform; public final class QuarkusJtaPlatform extends AbstractJtaPlatform { public static final QuarkusJtaPlatform INSTANCE = new QuarkusJtaPlatform(); private QuarkusJtaPlatform() { //nothing } @Override protected TransactionManager locateTransactionManager() { return com.arjuna.ats.jta.TransactionManager.transactionManager(); } @Override protected UserTransaction locateUserTransaction() { return com.arjuna.ats.jta.UserTransaction.userTransaction(); } }
package io.quarkus.hibernate.orm.runtime.customized; import javax.transaction.Synchronization; import javax.transaction.SystemException; import javax.transaction.Transaction; import javax.transaction.TransactionManager; import javax.transaction.UserTransaction; import org.hibernate.engine.transaction.jta.platform.internal.JtaSynchronizationStrategy; import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerAccess; import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerBasedSynchronizationStrategy; import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; public final class QuarkusJtaPlatform implements JtaPlatform, TransactionManagerAccess { public static final QuarkusJtaPlatform INSTANCE = new QuarkusJtaPlatform(); private final JtaSynchronizationStrategy tmSynchronizationStrategy = new TransactionManagerBasedSynchronizationStrategy( this); private volatile TransactionManager transactionManager; private volatile UserTransaction userTransaction; private QuarkusJtaPlatform() { //nothing } @Override public TransactionManager retrieveTransactionManager() { TransactionManager transactionManager = this.transactionManager; if (transactionManager == null) { transactionManager = com.arjuna.ats.jta.TransactionManager.transactionManager(); this.transactionManager = transactionManager; } return transactionManager; } @Override public TransactionManager getTransactionManager() { return retrieveTransactionManager(); } @Override public UserTransaction retrieveUserTransaction() { UserTransaction userTransaction = this.userTransaction; if (this.userTransaction == null) { userTransaction = com.arjuna.ats.jta.UserTransaction.userTransaction(); this.userTransaction = userTransaction; } return userTransaction; } @Override public Object getTransactionIdentifier(final Transaction transaction) { return transaction; } @Override public void registerSynchronization(Synchronization synchronization) { this.tmSynchronizationStrategy.registerSynchronization(synchronization); } @Override public boolean canRegisterSynchronization() { return this.tmSynchronizationStrategy.canRegisterSynchronization(); } @Override public int getCurrentStatus() throws SystemException { return this.retrieveTransactionManager().getStatus(); } }
52
6
4
mixed
--- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusJtaPlatform.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusJtaPlatform.java @@ -2,2 +2,5 @@ +import javax.transaction.Synchronization; +import javax.transaction.SystemException; +import javax.transaction.Transaction; import javax.transaction.TransactionManager; @@ -5,7 +8,15 @@ -import org.hibernate.engine.transaction.jta.platform.internal.AbstractJtaPlatform; +import org.hibernate.engine.transaction.jta.platform.internal.JtaSynchronizationStrategy; +import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerAccess; +import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerBasedSynchronizationStrategy; +import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; -public final class QuarkusJtaPlatform extends AbstractJtaPlatform { +public final class QuarkusJtaPlatform implements JtaPlatform, TransactionManagerAccess { public static final QuarkusJtaPlatform INSTANCE = new QuarkusJtaPlatform(); + + private final JtaSynchronizationStrategy tmSynchronizationStrategy = new TransactionManagerBasedSynchronizationStrategy( + this); + private volatile TransactionManager transactionManager; + private volatile UserTransaction userTransaction; @@ -16,4 +27,9 @@ @Override - protected TransactionManager locateTransactionManager() { - return com.arjuna.ats.jta.TransactionManager.transactionManager(); + public TransactionManager retrieveTransactionManager() { + TransactionManager transactionManager = this.transactionManager; + if (transactionManager == null) { + transactionManager = com.arjuna.ats.jta.TransactionManager.transactionManager(); + this.transactionManager = transactionManager; + } + return transactionManager; } @@ -21,4 +37,34 @@ @Override - protected UserTransaction locateUserTransaction() { - return com.arjuna.ats.jta.UserTransaction.userTransaction(); + public TransactionManager getTransactionManager() { + return retrieveTransactionManager(); + } + + @Override + public UserTransaction retrieveUserTransaction() { + UserTransaction userTransaction = this.userTransaction; + if (this.userTransaction == null) { + userTransaction = com.arjuna.ats.jta.UserTransaction.userTransaction(); + this.userTransaction = userTransaction; + } + return userTransaction; + } + + @Override + public Object getTransactionIdentifier(final Transaction transaction) { + return transaction; + } + + @Override + public void registerSynchronization(Synchronization synchronization) { + this.tmSynchronizationStrategy.registerSynchronization(synchronization); + } + + @Override + public boolean canRegisterSynchronization() { + return this.tmSynchronizationStrategy.canRegisterSynchronization(); + } + + @Override + public int getCurrentStatus() throws SystemException { + return this.retrieveTransactionManager().getStatus(); }
--- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusJtaPlatform.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusJtaPlatform.java @@ ... @@ +import javax.transaction.Synchronization; +import javax.transaction.SystemException; +import javax.transaction.Transaction; import javax.transaction.TransactionManager; @@ ... @@ -import org.hibernate.engine.transaction.jta.platform.internal.AbstractJtaPlatform; +import org.hibernate.engine.transaction.jta.platform.internal.JtaSynchronizationStrategy; +import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerAccess; +import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerBasedSynchronizationStrategy; +import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; -public final class QuarkusJtaPlatform extends AbstractJtaPlatform { +public final class QuarkusJtaPlatform implements JtaPlatform, TransactionManagerAccess { public static final QuarkusJtaPlatform INSTANCE = new QuarkusJtaPlatform(); + + private final JtaSynchronizationStrategy tmSynchronizationStrategy = new TransactionManagerBasedSynchronizationStrategy( + this); + private volatile TransactionManager transactionManager; + private volatile UserTransaction userTransaction; @@ ... @@ @Override - protected TransactionManager locateTransactionManager() { - return com.arjuna.ats.jta.TransactionManager.transactionManager(); + public TransactionManager retrieveTransactionManager() { + TransactionManager transactionManager = this.transactionManager; + if (transactionManager == null) { + transactionManager = com.arjuna.ats.jta.TransactionManager.transactionManager(); + this.transactionManager = transactionManager; + } + return transactionManager; } @@ ... @@ @Override - protected UserTransaction locateUserTransaction() { - return com.arjuna.ats.jta.UserTransaction.userTransaction(); + public TransactionManager getTransactionManager() { + return retrieveTransactionManager(); + } + + @Override + public UserTransaction retrieveUserTransaction() { + UserTransaction userTransaction = this.userTransaction; + if (this.userTransaction == null) { + userTransaction = com.arjuna.ats.jta.UserTransaction.userTransaction(); + this.userTransaction = userTransaction; + } + return userTransaction; + } + + @Override + public Object getTransactionIdentifier(final Transaction transaction) { + return transaction; + } + + @Override + public void registerSynchronization(Synchronization synchronization) { + this.tmSynchronizationStrategy.registerSynchronization(synchronization); + } + + @Override + public boolean canRegisterSynchronization() { + return this.tmSynchronizationStrategy.canRegisterSynchronization(); + } + + @Override + public int getCurrentStatus() throws SystemException { + return this.retrieveTransactionManager().getStatus(); }
--- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusJtaPlatform.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusJtaPlatform.java @@ -2,2 +2,5 @@ CON ADD import javax.transaction.Synchronization; ADD import javax.transaction.SystemException; ADD import javax.transaction.Transaction; CON import javax.transaction.TransactionManager; @@ -5,7 +8,15 @@ CON DEL import org.hibernate.engine.transaction.jta.platform.internal.AbstractJtaPlatform; ADD import org.hibernate.engine.transaction.jta.platform.internal.JtaSynchronizationStrategy; ADD import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerAccess; ADD import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerBasedSynchronizationStrategy; ADD import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; CON DEL public final class QuarkusJtaPlatform extends AbstractJtaPlatform { ADD public final class QuarkusJtaPlatform implements JtaPlatform, TransactionManagerAccess { CON CON public static final QuarkusJtaPlatform INSTANCE = new QuarkusJtaPlatform(); ADD ADD private final JtaSynchronizationStrategy tmSynchronizationStrategy = new TransactionManagerBasedSynchronizationStrategy( ADD this); ADD private volatile TransactionManager transactionManager; ADD private volatile UserTransaction userTransaction; CON @@ -16,4 +27,9 @@ CON @Override DEL protected TransactionManager locateTransactionManager() { DEL return com.arjuna.ats.jta.TransactionManager.transactionManager(); ADD public TransactionManager retrieveTransactionManager() { ADD TransactionManager transactionManager = this.transactionManager; ADD if (transactionManager == null) { ADD transactionManager = com.arjuna.ats.jta.TransactionManager.transactionManager(); ADD this.transactionManager = transactionManager; ADD } ADD return transactionManager; CON } @@ -21,4 +37,34 @@ CON @Override DEL protected UserTransaction locateUserTransaction() { DEL return com.arjuna.ats.jta.UserTransaction.userTransaction(); ADD public TransactionManager getTransactionManager() { ADD return retrieveTransactionManager(); ADD } ADD ADD @Override ADD public UserTransaction retrieveUserTransaction() { ADD UserTransaction userTransaction = this.userTransaction; ADD if (this.userTransaction == null) { ADD userTransaction = com.arjuna.ats.jta.UserTransaction.userTransaction(); ADD this.userTransaction = userTransaction; ADD } ADD return userTransaction; ADD } ADD ADD @Override ADD public Object getTransactionIdentifier(final Transaction transaction) { ADD return transaction; ADD } ADD ADD @Override ADD public void registerSynchronization(Synchronization synchronization) { ADD this.tmSynchronizationStrategy.registerSynchronization(synchronization); ADD } ADD ADD @Override ADD public boolean canRegisterSynchronization() { ADD return this.tmSynchronizationStrategy.canRegisterSynchronization(); ADD } ADD ADD @Override ADD public int getCurrentStatus() throws SystemException { ADD return this.retrieveTransactionManager().getStatus(); CON }
<<<<<<< SEARCH package io.quarkus.hibernate.orm.runtime.customized; import javax.transaction.TransactionManager; import javax.transaction.UserTransaction; import org.hibernate.engine.transaction.jta.platform.internal.AbstractJtaPlatform; public final class QuarkusJtaPlatform extends AbstractJtaPlatform { public static final QuarkusJtaPlatform INSTANCE = new QuarkusJtaPlatform(); private QuarkusJtaPlatform() { ======= package io.quarkus.hibernate.orm.runtime.customized; import javax.transaction.Synchronization; import javax.transaction.SystemException; import javax.transaction.Transaction; import javax.transaction.TransactionManager; import javax.transaction.UserTransaction; import org.hibernate.engine.transaction.jta.platform.internal.JtaSynchronizationStrategy; import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerAccess; import org.hibernate.engine.transaction.jta.platform.internal.TransactionManagerBasedSynchronizationStrategy; import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform; public final class QuarkusJtaPlatform implements JtaPlatform, TransactionManagerAccess { public static final QuarkusJtaPlatform INSTANCE = new QuarkusJtaPlatform(); private final JtaSynchronizationStrategy tmSynchronizationStrategy = new TransactionManagerBasedSynchronizationStrategy( this); private volatile TransactionManager transactionManager; private volatile UserTransaction userTransaction; private QuarkusJtaPlatform() { >>>>>>> REPLACE <<<<<<< SEARCH @Override protected TransactionManager locateTransactionManager() { return com.arjuna.ats.jta.TransactionManager.transactionManager(); } @Override protected UserTransaction locateUserTransaction() { return com.arjuna.ats.jta.UserTransaction.userTransaction(); } ======= @Override public TransactionManager retrieveTransactionManager() { TransactionManager transactionManager = this.transactionManager; if (transactionManager == null) { transactionManager = com.arjuna.ats.jta.TransactionManager.transactionManager(); this.transactionManager = transactionManager; } return transactionManager; } @Override public TransactionManager getTransactionManager() { return retrieveTransactionManager(); } @Override public UserTransaction retrieveUserTransaction() { UserTransaction userTransaction = this.userTransaction; if (this.userTransaction == null) { userTransaction = com.arjuna.ats.jta.UserTransaction.userTransaction(); this.userTransaction = userTransaction; } return userTransaction; } @Override public Object getTransactionIdentifier(final Transaction transaction) { return transaction; } @Override public void registerSynchronization(Synchronization synchronization) { this.tmSynchronizationStrategy.registerSynchronization(synchronization); } @Override public boolean canRegisterSynchronization() { return this.tmSynchronizationStrategy.canRegisterSynchronization(); } @Override public int getCurrentStatus() throws SystemException { return this.retrieveTransactionManager().getStatus(); } >>>>>>> REPLACE
databrary/databrary
05d65d646ee1fba5f07e1b203c421810da2908aa
app/assets/view/searchView.js
javascript
agpl-3.0
Include citation data in search targets
'use strict'; module.controller('searchView', [ '$scope', 'volumes', 'pageService', function ($scope, volumes, page) { page.display.title = page.constants.message('page.title.search'); // var updateData = function (data) { angular.forEach(data, function (volume) { volume.more = ''; angular.forEach(volume.access, function (access) { if (access.individual >= page.permission.ADMIN) { volume.more += ' ' + access.party.name; if ('email' in access.party) volume.more += ' ' + access.party.email; if ('affiliation' in access.party) volume.more += ' ' + access.party.affiliation; } }); }); $scope.volumes = data; }; updateData(volumes); // page.events.listen($scope, 'searchForm-init', function (form) { $scope.searchForm = $scope.searchForm || form; }); } ]);
'use strict'; module.controller('searchView', [ '$scope', 'volumes', 'pageService', function ($scope, volumes, page) { page.display.title = page.constants.message('page.title.search'); // var updateData = function (data) { angular.forEach(data, function (volume) { volume.more = ''; angular.forEach(volume.access, function (access) { if (access.individual >= page.permission.ADMIN) { volume.more += ' ' + access.party.name; if ('email' in access.party) volume.more += ' ' + access.party.email; if ('affiliation' in access.party) volume.more += ' ' + access.party.affiliation; } }); if (volume.citation) { angular.forEach(volume.citation, function (v) { volume.more += ' ' + v; }); } }); $scope.volumes = data; }; updateData(volumes); // page.events.listen($scope, 'searchForm-init', function (form) { $scope.searchForm = $scope.searchForm || form; }); } ]);
7
0
1
add_only
--- a/app/assets/view/searchView.js +++ b/app/assets/view/searchView.js @@ -21,2 +21,9 @@ }); + + if (volume.citation) { + angular.forEach(volume.citation, function (v) { + volume.more += ' ' + v; + }); + } + });
--- a/app/assets/view/searchView.js +++ b/app/assets/view/searchView.js @@ ... @@ }); + + if (volume.citation) { + angular.forEach(volume.citation, function (v) { + volume.more += ' ' + v; + }); + } + });
--- a/app/assets/view/searchView.js +++ b/app/assets/view/searchView.js @@ -21,2 +21,9 @@ CON }); ADD ADD if (volume.citation) { ADD angular.forEach(volume.citation, function (v) { ADD volume.more += ' ' + v; ADD }); ADD } ADD CON });
<<<<<<< SEARCH } }); }); ======= } }); if (volume.citation) { angular.forEach(volume.citation, function (v) { volume.more += ' ' + v; }); } }); >>>>>>> REPLACE
soasme/retries
b0efb7db50080dd1e9e96ad8d818e3b0859bbca3
retry/__init__.py
python
mit
Add a usage in retry
# -*- coding: utf-8 -*- from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _
# -*- coding: utf-8 -*- from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) @retry() # detect whatsoever errors and retry 3 times ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _
1
0
1
add_only
--- a/retry/__init__.py +++ b/retry/__init__.py @@ -14,2 +14,3 @@ @retry(errors=(TTransportException, AnyExpectedError)) + @retry() # detect whatsoever errors and retry 3 times '''
--- a/retry/__init__.py +++ b/retry/__init__.py @@ ... @@ @retry(errors=(TTransportException, AnyExpectedError)) + @retry() # detect whatsoever errors and retry 3 times '''
--- a/retry/__init__.py +++ b/retry/__init__.py @@ -14,2 +14,3 @@ CON @retry(errors=(TTransportException, AnyExpectedError)) ADD @retry() # detect whatsoever errors and retry 3 times CON '''
<<<<<<< SEARCH @retry(errors=(TTransportException, AnyExpectedError)) ''' ======= @retry(errors=(TTransportException, AnyExpectedError)) @retry() # detect whatsoever errors and retry 3 times ''' >>>>>>> REPLACE
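A brief usage sketch for the decorator in the record above. It assumes the retry package shown is importable (the module itself uses Python 2 "except Exception, e" syntax, so a Python 2 interpreter is implied); the flaky function, its call counter, and always_fails are illustrative stand-ins, not part of the package.

# Illustrative use of the retry decorator from the record above; the flaky
# function and its counter are stand-ins, not part of the package.
from retry import retry, RetryExceededError

calls = {"count": 0}

@retry(errors=(ValueError,), tries=3, delay=0)
def flaky():
    calls["count"] += 1
    if calls["count"] < 3:
        raise ValueError("not ready yet")  # swallowed and retried
    return "ok"

print(flaky())  # succeeds on the third attempt

@retry(errors=(ValueError,), tries=2)
def always_fails():
    raise ValueError("boom")

try:
    always_fails()
except RetryExceededError:
    print("gave up after 2 attempts")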
petertseng/exercism-rust
45513c97ca37ef5ca2a071e4c9bd27653eca89dc
exercises/roman-numerals/example.rs
rust
mit
roman-numerals: Use `iter` instead of `into_iter` when not moving Clippy suggested https://travis-ci.org/exercism/rust/builds/456432005 error: this .into_iter() call is equivalent to .iter() and will not move the array = help: for further information visit https://rust-lang-nursery.github.io/rust-clippy/v0.0.212/index.html#into_iter_on_array (compare to 75c1bf93b7045bcd25aa178a8cecf1da4bb422fc which was on slices. Apparently this is considered a warning for slices but an error on arrays)
use std::fmt; static ROMAN_MAP: [(usize, &'static str); 13] = [ (1, "I"), (4, "IV"), (5, "V"), (9, "IX"), (10, "X"), (40, "XL"), (50, "L"), (90, "XC"), (100, "C"), (400, "CD"), (500, "D"), (900, "CM"), (1000, "M"), ]; pub struct Roman { num: usize, } impl From<usize> for Roman { fn from(i: usize) -> Self { Roman::new(i) } } impl fmt::Display for Roman { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut start = self.num.clone(); let mut result = String::new(); for &(numeric, roman_string) in ROMAN_MAP.into_iter().rev() { while start >= numeric { result.push_str(roman_string); start -= numeric; } } write!(f, "{}", result) } } impl Roman { fn new(num: usize) -> Roman { Roman { num } } }
use std::fmt; static ROMAN_MAP: [(usize, &'static str); 13] = [ (1, "I"), (4, "IV"), (5, "V"), (9, "IX"), (10, "X"), (40, "XL"), (50, "L"), (90, "XC"), (100, "C"), (400, "CD"), (500, "D"), (900, "CM"), (1000, "M"), ]; pub struct Roman { num: usize, } impl From<usize> for Roman { fn from(i: usize) -> Self { Roman::new(i) } } impl fmt::Display for Roman { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut start = self.num.clone(); let mut result = String::new(); for &(numeric, roman_string) in ROMAN_MAP.iter().rev() { while start >= numeric { result.push_str(roman_string); start -= numeric; } } write!(f, "{}", result) } } impl Roman { fn new(num: usize) -> Roman { Roman { num } } }
1
1
1
mixed
--- a/exercises/roman-numerals/example.rs +++ b/exercises/roman-numerals/example.rs @@ -32,3 +32,3 @@ let mut result = String::new(); - for &(numeric, roman_string) in ROMAN_MAP.into_iter().rev() { + for &(numeric, roman_string) in ROMAN_MAP.iter().rev() { while start >= numeric {
--- a/exercises/roman-numerals/example.rs +++ b/exercises/roman-numerals/example.rs @@ ... @@ let mut result = String::new(); - for &(numeric, roman_string) in ROMAN_MAP.into_iter().rev() { + for &(numeric, roman_string) in ROMAN_MAP.iter().rev() { while start >= numeric {
--- a/exercises/roman-numerals/example.rs +++ b/exercises/roman-numerals/example.rs @@ -32,3 +32,3 @@ CON let mut result = String::new(); DEL for &(numeric, roman_string) in ROMAN_MAP.into_iter().rev() { ADD for &(numeric, roman_string) in ROMAN_MAP.iter().rev() { CON while start >= numeric {
<<<<<<< SEARCH let mut start = self.num.clone(); let mut result = String::new(); for &(numeric, roman_string) in ROMAN_MAP.into_iter().rev() { while start >= numeric { result.push_str(roman_string); ======= let mut start = self.num.clone(); let mut result = String::new(); for &(numeric, roman_string) in ROMAN_MAP.iter().rev() { while start >= numeric { result.push_str(roman_string); >>>>>>> REPLACE
saltstack/salt
3a5e2e34374f92f0412d121fb9552278105f230a
salt/acl/__init__.py
python
apache-2.0
Fix typo documention -> documentation
# -*- coding: utf-8 -*- ''' The acl module handles client_acl operations Additional information on client_acl can be found by reading the salt documention: http://docs.saltstack.com/en/latest/ref/clientacl.html ''' # Import python libraries from __future__ import absolute_import import re class ClientACL(object): ''' Represents the client ACL and provides methods to query the ACL for given operations ''' def __init__(self, blacklist): self.blacklist = blacklist def user_is_blacklisted(self, user): ''' Takes a username as a string and returns a boolean. True indicates that the provided user has been blacklisted ''' for blacklisted_user in self.blacklist.get('users', []): if re.match(blacklisted_user, user): return True return False def cmd_is_blacklisted(self, cmd): for blacklisted_module in self.blacklist.get('modules', []): # If this is a regular command, it is a single function if isinstance(cmd, str): funs_to_check = [cmd] # If this is a compound function else: funs_to_check = cmd for fun in funs_to_check: if re.match(blacklisted_module, fun): return True return False
# -*- coding: utf-8 -*- ''' The acl module handles client_acl operations Additional information on client_acl can be found by reading the salt documentation: http://docs.saltstack.com/en/latest/ref/clientacl.html ''' # Import python libraries from __future__ import absolute_import import re class ClientACL(object): ''' Represents the client ACL and provides methods to query the ACL for given operations ''' def __init__(self, blacklist): self.blacklist = blacklist def user_is_blacklisted(self, user): ''' Takes a username as a string and returns a boolean. True indicates that the provided user has been blacklisted ''' for blacklisted_user in self.blacklist.get('users', []): if re.match(blacklisted_user, user): return True return False def cmd_is_blacklisted(self, cmd): for blacklisted_module in self.blacklist.get('modules', []): # If this is a regular command, it is a single function if isinstance(cmd, str): funs_to_check = [cmd] # If this is a compound function else: funs_to_check = cmd for fun in funs_to_check: if re.match(blacklisted_module, fun): return True return False
1
1
1
mixed
--- a/salt/acl/__init__.py +++ b/salt/acl/__init__.py @@ -5,3 +5,3 @@ Additional information on client_acl can be -found by reading the salt documention: +found by reading the salt documentation:
--- a/salt/acl/__init__.py +++ b/salt/acl/__init__.py @@ ... @@ Additional information on client_acl can be -found by reading the salt documention: +found by reading the salt documentation:
--- a/salt/acl/__init__.py +++ b/salt/acl/__init__.py @@ -5,3 +5,3 @@ CON Additional information on client_acl can be DEL found by reading the salt documention: ADD found by reading the salt documentation: CON
<<<<<<< SEARCH Additional information on client_acl can be found by reading the salt documention: http://docs.saltstack.com/en/latest/ref/clientacl.html ======= Additional information on client_acl can be found by reading the salt documentation: http://docs.saltstack.com/en/latest/ref/clientacl.html >>>>>>> REPLACE
android/compose-samples
641e9341d54e0b4c7e71bef861750b91d07e47e5
Owl/app/src/main/java/com/example/owl/ui/utils/Scrim.kt
kotlin
apache-2.0
[Owl] Update scrim to use `drawWithCache` Change-Id: I157671de17dd51a9e5031d07f49eb6ccf5b7d1b9
/* * Copyright 2020 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.owl.ui.utils import androidx.compose.runtime.getValue import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.remember import androidx.compose.runtime.setValue import androidx.compose.ui.Modifier import androidx.compose.ui.composed import androidx.compose.ui.drawWithContent import androidx.compose.ui.graphics.Color import androidx.compose.ui.graphics.VerticalGradient /** * A [Modifier] which draws a vertical gradient */ fun Modifier.scrim(colors: List<Color>): Modifier = composed { var height by remember { mutableStateOf(0f) } val gradient = remember(colors, height) { VerticalGradient( colors = colors, startY = 0f, endY = height ) } drawWithContent { drawContent() height = size.height drawRect(brush = gradient) } }
/* * Copyright 2020 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.owl.ui.utils import androidx.compose.ui.Modifier import androidx.compose.ui.drawWithCache import androidx.compose.ui.graphics.Color import androidx.compose.ui.graphics.VerticalGradient /** * A [Modifier] which draws a vertical gradient */ fun Modifier.scrim(colors: List<Color>): Modifier = drawWithCache { // Use drawWithCache modifier to create and cache the gradient when size is known or changes. val gradient = VerticalGradient( colors = colors, startY = 0f, endY = size.height ) onDraw { drawRect(brush = gradient) } }
9
18
2
mixed
--- a/Owl/app/src/main/java/com/example/owl/ui/utils/Scrim.kt +++ b/Owl/app/src/main/java/com/example/owl/ui/utils/Scrim.kt @@ -18,9 +18,4 @@ -import androidx.compose.runtime.getValue -import androidx.compose.runtime.mutableStateOf -import androidx.compose.runtime.remember -import androidx.compose.runtime.setValue import androidx.compose.ui.Modifier -import androidx.compose.ui.composed -import androidx.compose.ui.drawWithContent +import androidx.compose.ui.drawWithCache import androidx.compose.ui.graphics.Color @@ -31,14 +26,10 @@ */ -fun Modifier.scrim(colors: List<Color>): Modifier = composed { - var height by remember { mutableStateOf(0f) } - val gradient = remember(colors, height) { - VerticalGradient( - colors = colors, - startY = 0f, - endY = height - ) - } - drawWithContent { - drawContent() - height = size.height +fun Modifier.scrim(colors: List<Color>): Modifier = drawWithCache { + // Use drawWithCache modifier to create and cache the gradient when size is known or changes. + val gradient = VerticalGradient( + colors = colors, + startY = 0f, + endY = size.height + ) + onDraw { drawRect(brush = gradient)
--- a/Owl/app/src/main/java/com/example/owl/ui/utils/Scrim.kt +++ b/Owl/app/src/main/java/com/example/owl/ui/utils/Scrim.kt @@ ... @@ -import androidx.compose.runtime.getValue -import androidx.compose.runtime.mutableStateOf -import androidx.compose.runtime.remember -import androidx.compose.runtime.setValue import androidx.compose.ui.Modifier -import androidx.compose.ui.composed -import androidx.compose.ui.drawWithContent +import androidx.compose.ui.drawWithCache import androidx.compose.ui.graphics.Color @@ ... @@ */ -fun Modifier.scrim(colors: List<Color>): Modifier = composed { - var height by remember { mutableStateOf(0f) } - val gradient = remember(colors, height) { - VerticalGradient( - colors = colors, - startY = 0f, - endY = height - ) - } - drawWithContent { - drawContent() - height = size.height +fun Modifier.scrim(colors: List<Color>): Modifier = drawWithCache { + // Use drawWithCache modifier to create and cache the gradient when size is known or changes. + val gradient = VerticalGradient( + colors = colors, + startY = 0f, + endY = size.height + ) + onDraw { drawRect(brush = gradient)
--- a/Owl/app/src/main/java/com/example/owl/ui/utils/Scrim.kt +++ b/Owl/app/src/main/java/com/example/owl/ui/utils/Scrim.kt @@ -18,9 +18,4 @@ CON DEL import androidx.compose.runtime.getValue DEL import androidx.compose.runtime.mutableStateOf DEL import androidx.compose.runtime.remember DEL import androidx.compose.runtime.setValue CON import androidx.compose.ui.Modifier DEL import androidx.compose.ui.composed DEL import androidx.compose.ui.drawWithContent ADD import androidx.compose.ui.drawWithCache CON import androidx.compose.ui.graphics.Color @@ -31,14 +26,10 @@ CON */ DEL fun Modifier.scrim(colors: List<Color>): Modifier = composed { DEL var height by remember { mutableStateOf(0f) } DEL val gradient = remember(colors, height) { DEL VerticalGradient( DEL colors = colors, DEL startY = 0f, DEL endY = height DEL ) DEL } DEL drawWithContent { DEL drawContent() DEL height = size.height ADD fun Modifier.scrim(colors: List<Color>): Modifier = drawWithCache { ADD // Use drawWithCache modifier to create and cache the gradient when size is known or changes. ADD val gradient = VerticalGradient( ADD colors = colors, ADD startY = 0f, ADD endY = size.height ADD ) ADD onDraw { CON drawRect(brush = gradient)
<<<<<<< SEARCH package com.example.owl.ui.utils import androidx.compose.runtime.getValue import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.remember import androidx.compose.runtime.setValue import androidx.compose.ui.Modifier import androidx.compose.ui.composed import androidx.compose.ui.drawWithContent import androidx.compose.ui.graphics.Color import androidx.compose.ui.graphics.VerticalGradient ======= package com.example.owl.ui.utils import androidx.compose.ui.Modifier import androidx.compose.ui.drawWithCache import androidx.compose.ui.graphics.Color import androidx.compose.ui.graphics.VerticalGradient >>>>>>> REPLACE <<<<<<< SEARCH * A [Modifier] which draws a vertical gradient */ fun Modifier.scrim(colors: List<Color>): Modifier = composed { var height by remember { mutableStateOf(0f) } val gradient = remember(colors, height) { VerticalGradient( colors = colors, startY = 0f, endY = height ) } drawWithContent { drawContent() height = size.height drawRect(brush = gradient) } ======= * A [Modifier] which draws a vertical gradient */ fun Modifier.scrim(colors: List<Color>): Modifier = drawWithCache { // Use drawWithCache modifier to create and cache the gradient when size is known or changes. val gradient = VerticalGradient( colors = colors, startY = 0f, endY = size.height ) onDraw { drawRect(brush = gradient) } >>>>>>> REPLACE
pie-flavor/Kludge
45b81721b38db06289a32db7896f932d07691dc9
src/main/kotlin/flavor/pie/kludge/players.kt
kotlin
mit
Add ChatTypeMessageReceiver methods to MessageReceiver
package flavor.pie.kludge import org.spongepowered.api.entity.living.player.Player import org.spongepowered.api.item.inventory.Inventory import org.spongepowered.api.item.inventory.type.GridInventory import org.spongepowered.api.item.inventory.entity.Hotbar val Player.storageInventory get() = inventory.query<Inventory>(GridInventory::class.java, Hotbar::class.java)!!
package flavor.pie.kludge import org.spongepowered.api.entity.living.player.Player import org.spongepowered.api.item.inventory.Inventory import org.spongepowered.api.item.inventory.type.GridInventory import org.spongepowered.api.item.inventory.entity.Hotbar import org.spongepowered.api.text.Text import org.spongepowered.api.text.TextElement import org.spongepowered.api.text.TextTemplate import org.spongepowered.api.text.channel.ChatTypeMessageReceiver import org.spongepowered.api.text.channel.MessageReceiver import org.spongepowered.api.text.chat.ChatType val Player.storageInventory get() = inventory.query<Inventory>(GridInventory::class.java, Hotbar::class.java)!! fun MessageReceiver.sendMessage(type: ChatType, message: Text): Boolean = if (this is ChatTypeMessageReceiver) { sendMessage(type, message) true } else { sendMessage(message) false } fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate): Boolean = if (this is ChatTypeMessageReceiver) { sendMessage(type, template) true } else { sendMessage(template) false } fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate, parameters: Map<String, TextElement>): Boolean = if (this is ChatTypeMessageReceiver) { sendMessage(type, template, parameters) true } else { sendMessage(template, parameters) false } fun MessageReceiver.sendMessages(type: ChatType, messages: Iterable<Text>): Boolean = if (this is ChatTypeMessageReceiver) { sendMessages(type, messages) true } else { sendMessages(messages) false } fun MessageReceiver.sendMessage(type: ChatType, vararg messages: Text): Boolean = if (this is ChatTypeMessageReceiver) { sendMessages(type, *messages) true } else { sendMessages(*messages) false }
50
0
2
add_only
--- a/src/main/kotlin/flavor/pie/kludge/players.kt +++ b/src/main/kotlin/flavor/pie/kludge/players.kt @@ -6,2 +6,8 @@ import org.spongepowered.api.item.inventory.entity.Hotbar +import org.spongepowered.api.text.Text +import org.spongepowered.api.text.TextElement +import org.spongepowered.api.text.TextTemplate +import org.spongepowered.api.text.channel.ChatTypeMessageReceiver +import org.spongepowered.api.text.channel.MessageReceiver +import org.spongepowered.api.text.chat.ChatType @@ -9 +15,45 @@ get() = inventory.query<Inventory>(GridInventory::class.java, Hotbar::class.java)!! + +fun MessageReceiver.sendMessage(type: ChatType, message: Text): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessage(type, message) + true + } else { + sendMessage(message) + false + } + +fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessage(type, template) + true + } else { + sendMessage(template) + false + } + +fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate, parameters: Map<String, TextElement>): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessage(type, template, parameters) + true + } else { + sendMessage(template, parameters) + false + } +fun MessageReceiver.sendMessages(type: ChatType, messages: Iterable<Text>): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessages(type, messages) + true + } else { + sendMessages(messages) + false + } + +fun MessageReceiver.sendMessage(type: ChatType, vararg messages: Text): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessages(type, *messages) + true + } else { + sendMessages(*messages) + false + }
--- a/src/main/kotlin/flavor/pie/kludge/players.kt +++ b/src/main/kotlin/flavor/pie/kludge/players.kt @@ ... @@ import org.spongepowered.api.item.inventory.entity.Hotbar +import org.spongepowered.api.text.Text +import org.spongepowered.api.text.TextElement +import org.spongepowered.api.text.TextTemplate +import org.spongepowered.api.text.channel.ChatTypeMessageReceiver +import org.spongepowered.api.text.channel.MessageReceiver +import org.spongepowered.api.text.chat.ChatType @@ ... @@ get() = inventory.query<Inventory>(GridInventory::class.java, Hotbar::class.java)!! + +fun MessageReceiver.sendMessage(type: ChatType, message: Text): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessage(type, message) + true + } else { + sendMessage(message) + false + } + +fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessage(type, template) + true + } else { + sendMessage(template) + false + } + +fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate, parameters: Map<String, TextElement>): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessage(type, template, parameters) + true + } else { + sendMessage(template, parameters) + false + } +fun MessageReceiver.sendMessages(type: ChatType, messages: Iterable<Text>): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessages(type, messages) + true + } else { + sendMessages(messages) + false + } + +fun MessageReceiver.sendMessage(type: ChatType, vararg messages: Text): Boolean = + if (this is ChatTypeMessageReceiver) { + sendMessages(type, *messages) + true + } else { + sendMessages(*messages) + false + }
--- a/src/main/kotlin/flavor/pie/kludge/players.kt +++ b/src/main/kotlin/flavor/pie/kludge/players.kt @@ -6,2 +6,8 @@ CON import org.spongepowered.api.item.inventory.entity.Hotbar ADD import org.spongepowered.api.text.Text ADD import org.spongepowered.api.text.TextElement ADD import org.spongepowered.api.text.TextTemplate ADD import org.spongepowered.api.text.channel.ChatTypeMessageReceiver ADD import org.spongepowered.api.text.channel.MessageReceiver ADD import org.spongepowered.api.text.chat.ChatType CON @@ -9 +15,45 @@ CON get() = inventory.query<Inventory>(GridInventory::class.java, Hotbar::class.java)!! ADD ADD fun MessageReceiver.sendMessage(type: ChatType, message: Text): Boolean = ADD if (this is ChatTypeMessageReceiver) { ADD sendMessage(type, message) ADD true ADD } else { ADD sendMessage(message) ADD false ADD } ADD ADD fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate): Boolean = ADD if (this is ChatTypeMessageReceiver) { ADD sendMessage(type, template) ADD true ADD } else { ADD sendMessage(template) ADD false ADD } ADD ADD fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate, parameters: Map<String, TextElement>): Boolean = ADD if (this is ChatTypeMessageReceiver) { ADD sendMessage(type, template, parameters) ADD true ADD } else { ADD sendMessage(template, parameters) ADD false ADD } ADD fun MessageReceiver.sendMessages(type: ChatType, messages: Iterable<Text>): Boolean = ADD if (this is ChatTypeMessageReceiver) { ADD sendMessages(type, messages) ADD true ADD } else { ADD sendMessages(messages) ADD false ADD } ADD ADD fun MessageReceiver.sendMessage(type: ChatType, vararg messages: Text): Boolean = ADD if (this is ChatTypeMessageReceiver) { ADD sendMessages(type, *messages) ADD true ADD } else { ADD sendMessages(*messages) ADD false ADD }
<<<<<<< SEARCH import org.spongepowered.api.item.inventory.type.GridInventory import org.spongepowered.api.item.inventory.entity.Hotbar val Player.storageInventory get() = inventory.query<Inventory>(GridInventory::class.java, Hotbar::class.java)!! ======= import org.spongepowered.api.item.inventory.type.GridInventory import org.spongepowered.api.item.inventory.entity.Hotbar import org.spongepowered.api.text.Text import org.spongepowered.api.text.TextElement import org.spongepowered.api.text.TextTemplate import org.spongepowered.api.text.channel.ChatTypeMessageReceiver import org.spongepowered.api.text.channel.MessageReceiver import org.spongepowered.api.text.chat.ChatType val Player.storageInventory get() = inventory.query<Inventory>(GridInventory::class.java, Hotbar::class.java)!! fun MessageReceiver.sendMessage(type: ChatType, message: Text): Boolean = if (this is ChatTypeMessageReceiver) { sendMessage(type, message) true } else { sendMessage(message) false } fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate): Boolean = if (this is ChatTypeMessageReceiver) { sendMessage(type, template) true } else { sendMessage(template) false } fun MessageReceiver.sendMessage(type: ChatType, template: TextTemplate, parameters: Map<String, TextElement>): Boolean = if (this is ChatTypeMessageReceiver) { sendMessage(type, template, parameters) true } else { sendMessage(template, parameters) false } fun MessageReceiver.sendMessages(type: ChatType, messages: Iterable<Text>): Boolean = if (this is ChatTypeMessageReceiver) { sendMessages(type, messages) true } else { sendMessages(messages) false } fun MessageReceiver.sendMessage(type: ChatType, vararg messages: Text): Boolean = if (this is ChatTypeMessageReceiver) { sendMessages(type, *messages) true } else { sendMessages(*messages) false } >>>>>>> REPLACE
google/evergreen-checker
6f12c7b3c7bbb1a232c54fa70fb53980710ccbd9
build.gradle.kts
kotlin
apache-2.0
Update Kotlin version from 1.3.61 to 1.4.10.
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import org.jetbrains.kotlin.gradle.tasks.KotlinCompile // Top-level build file where you can add configuration options common to all sub-projects/modules. buildscript { extra["kotlin_version"] = "1.3.61" repositories { google() jcenter() } dependencies { classpath("com.android.tools.build:gradle:4.1.2") classpath(kotlin("gradle-plugin", version = rootProject.extra["kotlin_version"] as String?)) classpath("com.google.gms:google-services:4.3.5") classpath("com.google.firebase:firebase-crashlytics-gradle:2.4.1") // NOTE: Do not place your application dependencies here; they belong // in the individual module build.gradle files } } allprojects { repositories { google() jcenter() } } tasks.withType<KotlinCompile> { kotlinOptions { jvmTarget = "1.8" } }
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import org.jetbrains.kotlin.gradle.tasks.KotlinCompile // Top-level build file where you can add configuration options common to all sub-projects/modules. buildscript { extra["kotlin_version"] = "1.4.10" repositories { google() jcenter() } dependencies { classpath("com.android.tools.build:gradle:4.1.2") classpath(kotlin("gradle-plugin", version = rootProject.extra["kotlin_version"] as String?)) classpath("com.google.gms:google-services:4.3.5") classpath("com.google.firebase:firebase-crashlytics-gradle:2.4.1") // NOTE: Do not place your application dependencies here; they belong // in the individual module build.gradle files } } allprojects { repositories { google() jcenter() } } tasks.withType<KotlinCompile> { kotlinOptions { jvmTarget = "1.8" } }
1
1
1
mixed
--- a/build.gradle.kts +++ b/build.gradle.kts @@ -19,3 +19,3 @@ buildscript { - extra["kotlin_version"] = "1.3.61" + extra["kotlin_version"] = "1.4.10"
--- a/build.gradle.kts +++ b/build.gradle.kts @@ ... @@ buildscript { - extra["kotlin_version"] = "1.3.61" + extra["kotlin_version"] = "1.4.10"
--- a/build.gradle.kts +++ b/build.gradle.kts @@ -19,3 +19,3 @@ CON buildscript { DEL extra["kotlin_version"] = "1.3.61" ADD extra["kotlin_version"] = "1.4.10" CON
<<<<<<< SEARCH buildscript { extra["kotlin_version"] = "1.3.61" repositories { ======= buildscript { extra["kotlin_version"] = "1.4.10" repositories { >>>>>>> REPLACE
benweier/blizzard.js
68d1f704f477b12490b33ceb4298a6302148d30c
index.js
javascript
mit
Update Blizzard instance function signature
/** * @file Blizzard.js * @description A Node.js wrapper for the Blizzard Battle.net Community Platform API * @copyright Copyright(c) 2016 Ben Weier <[email protected]> * @license MIT * @version 1.0.0 * @module index * @requires lib/blizzard */ 'use strict'; const Blizzard = require('./lib/blizzard'); /** * Initialize the Blizzard.js instance. * * @param {Object} args Blizzard.js configuration options * @return {Object} An instance of Blizzard.js * @example * const blizzard = require('blizzard.js').initialize({api_key: process.env.BATTLENET_API_KEY}); */ const initialize = function initialize (args) { // TODO Filter args by whitelisted configuration keys const config = args; return new Blizzard(config); }; exports.initialize = initialize;
/** * @file Blizzard.js * @description A Node.js wrapper for the Blizzard Battle.net Community Platform API * @copyright Copyright(c) 2016 Ben Weier <[email protected]> * @license MIT * @version 1.0.0 * @module index * @requires lib/blizzard */ 'use strict'; /** * @typedef {Object} Blizzard * @prop {Object} account Account API methods * @prop {Object} d3 D3 API methods * @prop {Object} sc2 Sc2 API methods * @prop {Object} wow WoW API methods * @prop {Function} params Filter an objects keys from an array of keys * @prop {Function} get Perform a single request to the Blizzard API * @prop {Function} all Perform multiple requests to the Blizzard API */ const Blizzard = require('./lib/blizzard'); /** * Initialize the Blizzard.js instance. * * @param {Object} args Blizzard.js configuration options * @param {Object} axios An instance config object compatible with [axios]{@link https://github.com/mzabriskie/axios} * @return {Blizzard} An instance of Blizzard.js * @example * const blizzard = require('blizzard.js').initialize({apikey: process.env.BATTLENET_API_KEY}); */ const initialize = function initialize (args, axios) { const config = Object.assign({}, args); const instance = Object.assign({}, axios); return new Blizzard(config, instance); }; exports.initialize = initialize;
18
7
2
mixed
--- a/index.js +++ b/index.js @@ -11,2 +11,12 @@ +/** + * @typedef {Object} Blizzard + * @prop {Object} account Account API methods + * @prop {Object} d3 D3 API methods + * @prop {Object} sc2 Sc2 API methods + * @prop {Object} wow WoW API methods + * @prop {Function} params Filter an objects keys from an array of keys + * @prop {Function} get Perform a single request to the Blizzard API + * @prop {Function} all Perform multiple requests to the Blizzard API + */ const Blizzard = require('./lib/blizzard'); @@ -16,12 +26,13 @@ * - * @param {Object} args Blizzard.js configuration options - * @return {Object} An instance of Blizzard.js + * @param {Object} args Blizzard.js configuration options + * @param {Object} axios An instance config object compatible with [axios]{@link https://github.com/mzabriskie/axios} + * @return {Blizzard} An instance of Blizzard.js * @example - * const blizzard = require('blizzard.js').initialize({api_key: process.env.BATTLENET_API_KEY}); + * const blizzard = require('blizzard.js').initialize({apikey: process.env.BATTLENET_API_KEY}); */ -const initialize = function initialize (args) { - // TODO Filter args by whitelisted configuration keys - const config = args; +const initialize = function initialize (args, axios) { + const config = Object.assign({}, args); + const instance = Object.assign({}, axios); - return new Blizzard(config); + return new Blizzard(config, instance); };
--- a/index.js +++ b/index.js @@ ... @@ +/** + * @typedef {Object} Blizzard + * @prop {Object} account Account API methods + * @prop {Object} d3 D3 API methods + * @prop {Object} sc2 Sc2 API methods + * @prop {Object} wow WoW API methods + * @prop {Function} params Filter an objects keys from an array of keys + * @prop {Function} get Perform a single request to the Blizzard API + * @prop {Function} all Perform multiple requests to the Blizzard API + */ const Blizzard = require('./lib/blizzard'); @@ ... @@ * - * @param {Object} args Blizzard.js configuration options - * @return {Object} An instance of Blizzard.js + * @param {Object} args Blizzard.js configuration options + * @param {Object} axios An instance config object compatible with [axios]{@link https://github.com/mzabriskie/axios} + * @return {Blizzard} An instance of Blizzard.js * @example - * const blizzard = require('blizzard.js').initialize({api_key: process.env.BATTLENET_API_KEY}); + * const blizzard = require('blizzard.js').initialize({apikey: process.env.BATTLENET_API_KEY}); */ -const initialize = function initialize (args) { - // TODO Filter args by whitelisted configuration keys - const config = args; +const initialize = function initialize (args, axios) { + const config = Object.assign({}, args); + const instance = Object.assign({}, axios); - return new Blizzard(config); + return new Blizzard(config, instance); };
--- a/index.js +++ b/index.js @@ -11,2 +11,12 @@ CON ADD /** ADD * @typedef {Object} Blizzard ADD * @prop {Object} account Account API methods ADD * @prop {Object} d3 D3 API methods ADD * @prop {Object} sc2 Sc2 API methods ADD * @prop {Object} wow WoW API methods ADD * @prop {Function} params Filter an objects keys from an array of keys ADD * @prop {Function} get Perform a single request to the Blizzard API ADD * @prop {Function} all Perform multiple requests to the Blizzard API ADD */ CON const Blizzard = require('./lib/blizzard'); @@ -16,12 +26,13 @@ CON * DEL * @param {Object} args Blizzard.js configuration options DEL * @return {Object} An instance of Blizzard.js ADD * @param {Object} args Blizzard.js configuration options ADD * @param {Object} axios An instance config object compatible with [axios]{@link https://github.com/mzabriskie/axios} ADD * @return {Blizzard} An instance of Blizzard.js CON * @example DEL * const blizzard = require('blizzard.js').initialize({api_key: process.env.BATTLENET_API_KEY}); ADD * const blizzard = require('blizzard.js').initialize({apikey: process.env.BATTLENET_API_KEY}); CON */ DEL const initialize = function initialize (args) { DEL // TODO Filter args by whitelisted configuration keys DEL const config = args; ADD const initialize = function initialize (args, axios) { ADD const config = Object.assign({}, args); ADD const instance = Object.assign({}, axios); CON DEL return new Blizzard(config); ADD return new Blizzard(config, instance); CON };
<<<<<<< SEARCH 'use strict'; const Blizzard = require('./lib/blizzard'); /** * Initialize the Blizzard.js instance. * * @param {Object} args Blizzard.js configuration options * @return {Object} An instance of Blizzard.js * @example * const blizzard = require('blizzard.js').initialize({api_key: process.env.BATTLENET_API_KEY}); */ const initialize = function initialize (args) { // TODO Filter args by whitelisted configuration keys const config = args; return new Blizzard(config); }; ======= 'use strict'; /** * @typedef {Object} Blizzard * @prop {Object} account Account API methods * @prop {Object} d3 D3 API methods * @prop {Object} sc2 Sc2 API methods * @prop {Object} wow WoW API methods * @prop {Function} params Filter an objects keys from an array of keys * @prop {Function} get Perform a single request to the Blizzard API * @prop {Function} all Perform multiple requests to the Blizzard API */ const Blizzard = require('./lib/blizzard'); /** * Initialize the Blizzard.js instance. * * @param {Object} args Blizzard.js configuration options * @param {Object} axios An instance config object compatible with [axios]{@link https://github.com/mzabriskie/axios} * @return {Blizzard} An instance of Blizzard.js * @example * const blizzard = require('blizzard.js').initialize({apikey: process.env.BATTLENET_API_KEY}); */ const initialize = function initialize (args, axios) { const config = Object.assign({}, args); const instance = Object.assign({}, axios); return new Blizzard(config, instance); }; >>>>>>> REPLACE
gyn/exercism
11fdd9c3cd22a52f07ae825c742fea5a8ef8d0c1
rust/all-your-base/src/lib.rs
rust
bsd-2-clause
Clean up rust code for all-your-base It looks better now
/// /// Convert a number between two bases. /// /// A number is any slice of digits. /// A digit is any unsigned integer (e.g. u8, u16, u32, u64, or usize). /// Bases are specified as unsigned integers. /// /// Return an `Err(.)` if the conversion is impossible. /// The tests do not test for specific values inside the `Err(.)`. /// /// /// You are allowed to change the function signature as long as all test still pass. /// /// /// Example: /// Input /// number: &[4, 2] /// from_base: 10 /// to_base: 2 /// Result /// Ok(vec![1, 0, 1, 0, 1, 0]) /// /// The example corresponds to converting the number 42 from decimal /// which is equivalent to 101010 in binary. /// /// /// Notes: /// * The empty slice ( "[]" ) is equal to the number 0. /// * Never output leading 0 digits. However, your function must be able to /// process input with leading 0 digits. /// #[allow(unused_variables)] pub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> { if number.into_iter().any(|&x| x >= from_base) { return Err(()); } if from_base < 2 || to_base < 2 { return Err(()); } let mut r = number.into_iter().fold(0, |acc, &x| acc * from_base + x); let mut result = (0..) .take_while(move |&x| r >= to_base.pow(x as u32)) .map(|x| { let d = r % to_base; r /= to_base; d }) .collect::<Vec<_>>(); result.reverse(); Ok(result) }
pub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> { if from_base < 2 || to_base < 2 { return Err(()); } if number.iter().any(|&x| x >= from_base) { return Err(()); } let limit = number.iter().fold(0, |acc, &x| acc * from_base + x); let mut r = limit; let mut result = (0..) .take_while(|&x| limit >= to_base.pow(x as u32)) .map(|_| { let d = r % to_base; r /= to_base; d }) .collect::<Vec<_>>(); result.reverse(); Ok(result) }
8
39
2
mixed
--- a/rust/all-your-base/src/lib.rs +++ b/rust/all-your-base/src/lib.rs @@ -1,38 +1,2 @@ -/// -/// Convert a number between two bases. -/// -/// A number is any slice of digits. -/// A digit is any unsigned integer (e.g. u8, u16, u32, u64, or usize). -/// Bases are specified as unsigned integers. -/// -/// Return an `Err(.)` if the conversion is impossible. -/// The tests do not test for specific values inside the `Err(.)`. -/// -/// -/// You are allowed to change the function signature as long as all test still pass. -/// -/// -/// Example: -/// Input -/// number: &[4, 2] -/// from_base: 10 -/// to_base: 2 -/// Result -/// Ok(vec![1, 0, 1, 0, 1, 0]) -/// -/// The example corresponds to converting the number 42 from decimal -/// which is equivalent to 101010 in binary. -/// -/// -/// Notes: -/// * The empty slice ( "[]" ) is equal to the number 0. -/// * Never output leading 0 digits. However, your function must be able to -/// process input with leading 0 digits. -/// -#[allow(unused_variables)] pub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> { - if number.into_iter().any(|&x| x >= from_base) { - return Err(()); - } - if from_base < 2 || to_base < 2 { @@ -41,7 +5,12 @@ - let mut r = number.into_iter().fold(0, |acc, &x| acc * from_base + x); + if number.iter().any(|&x| x >= from_base) { + return Err(()); + } + + let limit = number.iter().fold(0, |acc, &x| acc * from_base + x); + let mut r = limit; let mut result = (0..) - .take_while(move |&x| r >= to_base.pow(x as u32)) - .map(|x| { + .take_while(|&x| limit >= to_base.pow(x as u32)) + .map(|_| { let d = r % to_base;
--- a/rust/all-your-base/src/lib.rs +++ b/rust/all-your-base/src/lib.rs @@ ... @@ -/// -/// Convert a number between two bases. -/// -/// A number is any slice of digits. -/// A digit is any unsigned integer (e.g. u8, u16, u32, u64, or usize). -/// Bases are specified as unsigned integers. -/// -/// Return an `Err(.)` if the conversion is impossible. -/// The tests do not test for specific values inside the `Err(.)`. -/// -/// -/// You are allowed to change the function signature as long as all test still pass. -/// -/// -/// Example: -/// Input -/// number: &[4, 2] -/// from_base: 10 -/// to_base: 2 -/// Result -/// Ok(vec![1, 0, 1, 0, 1, 0]) -/// -/// The example corresponds to converting the number 42 from decimal -/// which is equivalent to 101010 in binary. -/// -/// -/// Notes: -/// * The empty slice ( "[]" ) is equal to the number 0. -/// * Never output leading 0 digits. However, your function must be able to -/// process input with leading 0 digits. -/// -#[allow(unused_variables)] pub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> { - if number.into_iter().any(|&x| x >= from_base) { - return Err(()); - } - if from_base < 2 || to_base < 2 { @@ ... @@ - let mut r = number.into_iter().fold(0, |acc, &x| acc * from_base + x); + if number.iter().any(|&x| x >= from_base) { + return Err(()); + } + + let limit = number.iter().fold(0, |acc, &x| acc * from_base + x); + let mut r = limit; let mut result = (0..) - .take_while(move |&x| r >= to_base.pow(x as u32)) - .map(|x| { + .take_while(|&x| limit >= to_base.pow(x as u32)) + .map(|_| { let d = r % to_base;
--- a/rust/all-your-base/src/lib.rs +++ b/rust/all-your-base/src/lib.rs @@ -1,38 +1,2 @@ DEL /// DEL /// Convert a number between two bases. DEL /// DEL /// A number is any slice of digits. DEL /// A digit is any unsigned integer (e.g. u8, u16, u32, u64, or usize). DEL /// Bases are specified as unsigned integers. DEL /// DEL /// Return an `Err(.)` if the conversion is impossible. DEL /// The tests do not test for specific values inside the `Err(.)`. DEL /// DEL /// DEL /// You are allowed to change the function signature as long as all test still pass. DEL /// DEL /// DEL /// Example: DEL /// Input DEL /// number: &[4, 2] DEL /// from_base: 10 DEL /// to_base: 2 DEL /// Result DEL /// Ok(vec![1, 0, 1, 0, 1, 0]) DEL /// DEL /// The example corresponds to converting the number 42 from decimal DEL /// which is equivalent to 101010 in binary. DEL /// DEL /// DEL /// Notes: DEL /// * The empty slice ( "[]" ) is equal to the number 0. DEL /// * Never output leading 0 digits. However, your function must be able to DEL /// process input with leading 0 digits. DEL /// DEL #[allow(unused_variables)] CON pub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> { DEL if number.into_iter().any(|&x| x >= from_base) { DEL return Err(()); DEL } DEL CON if from_base < 2 || to_base < 2 { @@ -41,7 +5,12 @@ CON DEL let mut r = number.into_iter().fold(0, |acc, &x| acc * from_base + x); ADD if number.iter().any(|&x| x >= from_base) { ADD return Err(()); ADD } ADD ADD let limit = number.iter().fold(0, |acc, &x| acc * from_base + x); ADD let mut r = limit; CON CON let mut result = (0..) DEL .take_while(move |&x| r >= to_base.pow(x as u32)) DEL .map(|x| { ADD .take_while(|&x| limit >= to_base.pow(x as u32)) ADD .map(|_| { CON let d = r % to_base;
<<<<<<< SEARCH /// /// Convert a number between two bases. /// /// A number is any slice of digits. /// A digit is any unsigned integer (e.g. u8, u16, u32, u64, or usize). /// Bases are specified as unsigned integers. /// /// Return an `Err(.)` if the conversion is impossible. /// The tests do not test for specific values inside the `Err(.)`. /// /// /// You are allowed to change the function signature as long as all test still pass. /// /// /// Example: /// Input /// number: &[4, 2] /// from_base: 10 /// to_base: 2 /// Result /// Ok(vec![1, 0, 1, 0, 1, 0]) /// /// The example corresponds to converting the number 42 from decimal /// which is equivalent to 101010 in binary. /// /// /// Notes: /// * The empty slice ( "[]" ) is equal to the number 0. /// * Never output leading 0 digits. However, your function must be able to /// process input with leading 0 digits. /// #[allow(unused_variables)] pub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> { if number.into_iter().any(|&x| x >= from_base) { return Err(()); } if from_base < 2 || to_base < 2 { return Err(()); } let mut r = number.into_iter().fold(0, |acc, &x| acc * from_base + x); let mut result = (0..) .take_while(move |&x| r >= to_base.pow(x as u32)) .map(|x| { let d = r % to_base; r /= to_base; ======= pub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> { if from_base < 2 || to_base < 2 { return Err(()); } if number.iter().any(|&x| x >= from_base) { return Err(()); } let limit = number.iter().fold(0, |acc, &x| acc * from_base + x); let mut r = limit; let mut result = (0..) .take_while(|&x| limit >= to_base.pow(x as u32)) .map(|_| { let d = r % to_base; r /= to_base; >>>>>>> REPLACE
k13n/asmstubber
3fc5b21b1970acecd54afc11122775ec668aa93b
src/main/java/org/k13n/swtstubber/util/FileUtil.java
java
mit
Add methods to write the transformed bytecode to a file
package org.k13n.swtstubber.util; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; public class FileUtil { public static byte[] writeStreamToArray(InputStream stream) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int length = 0; while ((length = stream.read(buffer)) != -1) baos.write(buffer, 0, length); return baos.toByteArray(); } }
package org.k13n.swtstubber.util; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; public class FileUtil { public static byte[] writeStreamToArray(InputStream stream) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int length = 0; while ((length = stream.read(buffer)) != -1) baos.write(buffer, 0, length); return baos.toByteArray(); } public static void writeBytecode(String targetDirectory, String className, byte[] bytecode) throws IOException { String packageName = packageName(className); createPackageHierarchy(targetDirectory, packageName); String file = targetDirectory + File.separator + className + ".class"; FileOutputStream stream = new FileOutputStream(new File(file)); stream.write(bytecode, 0, bytecode.length); stream.flush(); stream.close(); } private static String createPackageHierarchy(String targetDirectory, String packageName) { String path = targetDirectory; for (String folder : packageName.split("\\.")) { path = path + File.separator + folder; createDirectoryIfNotExists(path); } return path; } private static void createDirectoryIfNotExists(String directory) { File file = new File(directory); if (file.exists()) { if (!file.isDirectory()) { String msg = "Path " + directory + " exists, but is not a directory"; throw new RuntimeException(msg); } } else { file.mkdir(); } } private static String packageName(String internalName) { int separatorPos = internalName.lastIndexOf("/"); String packageName = internalName.substring(0, separatorPos); return packageName.replace('/', '.'); } }
41
0
2
add_only
--- a/src/main/java/org/k13n/swtstubber/util/FileUtil.java +++ b/src/main/java/org/k13n/swtstubber/util/FileUtil.java @@ -3,2 +3,4 @@ import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; @@ -18,2 +20,41 @@ + public static void writeBytecode(String targetDirectory, String className, + byte[] bytecode) throws IOException { + String packageName = packageName(className); + createPackageHierarchy(targetDirectory, packageName); + String file = targetDirectory + File.separator + className + ".class"; + FileOutputStream stream = new FileOutputStream(new File(file)); + stream.write(bytecode, 0, bytecode.length); + stream.flush(); + stream.close(); + } + + private static String createPackageHierarchy(String targetDirectory, + String packageName) { + String path = targetDirectory; + for (String folder : packageName.split("\\.")) { + path = path + File.separator + folder; + createDirectoryIfNotExists(path); + } + return path; + } + + private static void createDirectoryIfNotExists(String directory) { + File file = new File(directory); + if (file.exists()) { + if (!file.isDirectory()) { + String msg = "Path " + directory + " exists, but is not a directory"; + throw new RuntimeException(msg); + } + } else { + file.mkdir(); + } + } + + private static String packageName(String internalName) { + int separatorPos = internalName.lastIndexOf("/"); + String packageName = internalName.substring(0, separatorPos); + return packageName.replace('/', '.'); + } + }
--- a/src/main/java/org/k13n/swtstubber/util/FileUtil.java +++ b/src/main/java/org/k13n/swtstubber/util/FileUtil.java @@ ... @@ import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; @@ ... @@ + public static void writeBytecode(String targetDirectory, String className, + byte[] bytecode) throws IOException { + String packageName = packageName(className); + createPackageHierarchy(targetDirectory, packageName); + String file = targetDirectory + File.separator + className + ".class"; + FileOutputStream stream = new FileOutputStream(new File(file)); + stream.write(bytecode, 0, bytecode.length); + stream.flush(); + stream.close(); + } + + private static String createPackageHierarchy(String targetDirectory, + String packageName) { + String path = targetDirectory; + for (String folder : packageName.split("\\.")) { + path = path + File.separator + folder; + createDirectoryIfNotExists(path); + } + return path; + } + + private static void createDirectoryIfNotExists(String directory) { + File file = new File(directory); + if (file.exists()) { + if (!file.isDirectory()) { + String msg = "Path " + directory + " exists, but is not a directory"; + throw new RuntimeException(msg); + } + } else { + file.mkdir(); + } + } + + private static String packageName(String internalName) { + int separatorPos = internalName.lastIndexOf("/"); + String packageName = internalName.substring(0, separatorPos); + return packageName.replace('/', '.'); + } + }
--- a/src/main/java/org/k13n/swtstubber/util/FileUtil.java +++ b/src/main/java/org/k13n/swtstubber/util/FileUtil.java @@ -3,2 +3,4 @@ CON import java.io.ByteArrayOutputStream; ADD import java.io.File; ADD import java.io.FileOutputStream; CON import java.io.IOException; @@ -18,2 +20,41 @@ CON ADD public static void writeBytecode(String targetDirectory, String className, ADD byte[] bytecode) throws IOException { ADD String packageName = packageName(className); ADD createPackageHierarchy(targetDirectory, packageName); ADD String file = targetDirectory + File.separator + className + ".class"; ADD FileOutputStream stream = new FileOutputStream(new File(file)); ADD stream.write(bytecode, 0, bytecode.length); ADD stream.flush(); ADD stream.close(); ADD } ADD ADD private static String createPackageHierarchy(String targetDirectory, ADD String packageName) { ADD String path = targetDirectory; ADD for (String folder : packageName.split("\\.")) { ADD path = path + File.separator + folder; ADD createDirectoryIfNotExists(path); ADD } ADD return path; ADD } ADD ADD private static void createDirectoryIfNotExists(String directory) { ADD File file = new File(directory); ADD if (file.exists()) { ADD if (!file.isDirectory()) { ADD String msg = "Path " + directory + " exists, but is not a directory"; ADD throw new RuntimeException(msg); ADD } ADD } else { ADD file.mkdir(); ADD } ADD } ADD ADD private static String packageName(String internalName) { ADD int separatorPos = internalName.lastIndexOf("/"); ADD String packageName = internalName.substring(0, separatorPos); ADD return packageName.replace('/', '.'); ADD } ADD CON }
<<<<<<< SEARCH import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; ======= import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; >>>>>>> REPLACE <<<<<<< SEARCH } } ======= } public static void writeBytecode(String targetDirectory, String className, byte[] bytecode) throws IOException { String packageName = packageName(className); createPackageHierarchy(targetDirectory, packageName); String file = targetDirectory + File.separator + className + ".class"; FileOutputStream stream = new FileOutputStream(new File(file)); stream.write(bytecode, 0, bytecode.length); stream.flush(); stream.close(); } private static String createPackageHierarchy(String targetDirectory, String packageName) { String path = targetDirectory; for (String folder : packageName.split("\\.")) { path = path + File.separator + folder; createDirectoryIfNotExists(path); } return path; } private static void createDirectoryIfNotExists(String directory) { File file = new File(directory); if (file.exists()) { if (!file.isDirectory()) { String msg = "Path " + directory + " exists, but is not a directory"; throw new RuntimeException(msg); } } else { file.mkdir(); } } private static String packageName(String internalName) { int separatorPos = internalName.lastIndexOf("/"); String packageName = internalName.substring(0, separatorPos); return packageName.replace('/', '.'); } } >>>>>>> REPLACE
depcheck/depcheck
74fe70d18f56b8f3cdf9576f79a022f5e2538695
src/utils/get-scripts.js
javascript
mit
Add cache logic to get script utility.
import fs from 'fs'; import path from 'path'; import yaml from 'js-yaml'; const travisCommands = [ // Reference: http://docs.travis-ci.com/user/customizing-the-build/#The-Build-Lifecycle 'before_install', 'install', 'before_script', 'script', 'after_success or after_failure', 'before_deploy', 'deploy', 'after_deploy', 'after_script', ]; function concat(array, item) { return array.concat(item); } function getObjectValues(object) { return Object.keys(object).map(key => object[key]); } export default function getScripts(filepath, content = null) { const basename = path.basename(filepath); const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); if (basename === 'package.json') { return getObjectValues(JSON.parse(fileContent).scripts || {}); } else if (basename === '.travis.yml') { const metadata = yaml.safeLoad(content) || {}; return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); } return []; }
import fs from 'fs'; import path from 'path'; import yaml from 'js-yaml'; const scriptCache = {}; function getCacheOrFile(key, fn) { if (scriptCache[key]) { return scriptCache[key]; } const value = fn(); scriptCache[key] = value; return value; } const travisCommands = [ // Reference: http://docs.travis-ci.com/user/customizing-the-build/#The-Build-Lifecycle 'before_install', 'install', 'before_script', 'script', 'after_success or after_failure', 'before_deploy', 'deploy', 'after_deploy', 'after_script', ]; function concat(array, item) { return array.concat(item); } function getObjectValues(object) { return Object.keys(object).map(key => object[key]); } export default function getScripts(filepath, content = null) { return getCacheOrFile(filepath, () => { const basename = path.basename(filepath); const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); if (basename === 'package.json') { return getObjectValues(JSON.parse(fileContent).scripts || {}); } else if (basename === '.travis.yml') { const metadata = yaml.safeLoad(content) || {}; return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); } return []; }); }
24
9
2
mixed
--- a/src/utils/get-scripts.js +++ b/src/utils/get-scripts.js @@ -3,2 +3,15 @@ import yaml from 'js-yaml'; + +const scriptCache = {}; + +function getCacheOrFile(key, fn) { + if (scriptCache[key]) { + return scriptCache[key]; + } + + const value = fn(); + scriptCache[key] = value; + + return value; +} @@ -26,13 +39,15 @@ export default function getScripts(filepath, content = null) { - const basename = path.basename(filepath); - const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); + return getCacheOrFile(filepath, () => { + const basename = path.basename(filepath); + const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); - if (basename === 'package.json') { - return getObjectValues(JSON.parse(fileContent).scripts || {}); - } else if (basename === '.travis.yml') { - const metadata = yaml.safeLoad(content) || {}; - return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); - } + if (basename === 'package.json') { + return getObjectValues(JSON.parse(fileContent).scripts || {}); + } else if (basename === '.travis.yml') { + const metadata = yaml.safeLoad(content) || {}; + return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); + } - return []; + return []; + }); }
--- a/src/utils/get-scripts.js +++ b/src/utils/get-scripts.js @@ ... @@ import yaml from 'js-yaml'; + +const scriptCache = {}; + +function getCacheOrFile(key, fn) { + if (scriptCache[key]) { + return scriptCache[key]; + } + + const value = fn(); + scriptCache[key] = value; + + return value; +} @@ ... @@ export default function getScripts(filepath, content = null) { - const basename = path.basename(filepath); - const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); + return getCacheOrFile(filepath, () => { + const basename = path.basename(filepath); + const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); - if (basename === 'package.json') { - return getObjectValues(JSON.parse(fileContent).scripts || {}); - } else if (basename === '.travis.yml') { - const metadata = yaml.safeLoad(content) || {}; - return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); - } + if (basename === 'package.json') { + return getObjectValues(JSON.parse(fileContent).scripts || {}); + } else if (basename === '.travis.yml') { + const metadata = yaml.safeLoad(content) || {}; + return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); + } - return []; + return []; + }); }
--- a/src/utils/get-scripts.js +++ b/src/utils/get-scripts.js @@ -3,2 +3,15 @@ CON import yaml from 'js-yaml'; ADD ADD const scriptCache = {}; ADD ADD function getCacheOrFile(key, fn) { ADD if (scriptCache[key]) { ADD return scriptCache[key]; ADD } ADD ADD const value = fn(); ADD scriptCache[key] = value; ADD ADD return value; ADD } CON @@ -26,13 +39,15 @@ CON export default function getScripts(filepath, content = null) { DEL const basename = path.basename(filepath); DEL const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); ADD return getCacheOrFile(filepath, () => { ADD const basename = path.basename(filepath); ADD const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); CON DEL if (basename === 'package.json') { DEL return getObjectValues(JSON.parse(fileContent).scripts || {}); DEL } else if (basename === '.travis.yml') { DEL const metadata = yaml.safeLoad(content) || {}; DEL return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); DEL } ADD if (basename === 'package.json') { ADD return getObjectValues(JSON.parse(fileContent).scripts || {}); ADD } else if (basename === '.travis.yml') { ADD const metadata = yaml.safeLoad(content) || {}; ADD return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); ADD } CON DEL return []; ADD return []; ADD }); CON }
<<<<<<< SEARCH import path from 'path'; import yaml from 'js-yaml'; const travisCommands = [ ======= import path from 'path'; import yaml from 'js-yaml'; const scriptCache = {}; function getCacheOrFile(key, fn) { if (scriptCache[key]) { return scriptCache[key]; } const value = fn(); scriptCache[key] = value; return value; } const travisCommands = [ >>>>>>> REPLACE <<<<<<< SEARCH export default function getScripts(filepath, content = null) { const basename = path.basename(filepath); const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); if (basename === 'package.json') { return getObjectValues(JSON.parse(fileContent).scripts || {}); } else if (basename === '.travis.yml') { const metadata = yaml.safeLoad(content) || {}; return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); } return []; } ======= export default function getScripts(filepath, content = null) { return getCacheOrFile(filepath, () => { const basename = path.basename(filepath); const fileContent = content !== null ? content : fs.readFileSync(filepath, 'utf-8'); if (basename === 'package.json') { return getObjectValues(JSON.parse(fileContent).scripts || {}); } else if (basename === '.travis.yml') { const metadata = yaml.safeLoad(content) || {}; return travisCommands.map(cmd => metadata[cmd] || []).reduce(concat, []); } return []; }); } >>>>>>> REPLACE
kidaa/rave
ed6bcfc4fa30882b9f4cfce01d90a20e83f04e68
rave-components/rave-core-api/src/main/java/org/apache/rave/rest/interceptor/JsonResponseWrapperInterceptor.java
java
apache-2.0
Add apache headers to interceptor git-svn-id: 2c5eef89e506a7ff64d405e714ba3c778b83051b@1506125 13f79535-47bb-0310-9956-ffa450edef68
package org.apache.rave.rest.interceptor; import org.apache.cxf.interceptor.Fault; import org.apache.cxf.message.Message; import org.apache.cxf.phase.AbstractPhaseInterceptor; import org.apache.cxf.phase.Phase; import org.apache.rave.rest.model.JsonResponseWrapper; /** * Created with IntelliJ IDEA. * User: erinnp * Date: 7/22/13 * Time: 4:56 PM * To change this template use File | Settings | File Templates. */ public class JsonResponseWrapperInterceptor extends AbstractPhaseInterceptor<Message> { public JsonResponseWrapperInterceptor() { super(Phase.WRITE); } @Override public void handleMessage(Message message) throws Fault { Object o = message.getContent(Object.class); JsonResponseWrapper wrapper = new JsonResponseWrapper(o); message.setContent(JsonResponseWrapper.class, wrapper); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.rave.rest.interceptor; import org.apache.cxf.interceptor.Fault; import org.apache.cxf.message.Message; import org.apache.cxf.phase.AbstractPhaseInterceptor; import org.apache.cxf.phase.Phase; import org.apache.rave.rest.model.JsonResponseWrapper; /** * Created with IntelliJ IDEA. * User: erinnp * Date: 7/22/13 * Time: 4:56 PM * To change this template use File | Settings | File Templates. */ public class JsonResponseWrapperInterceptor extends AbstractPhaseInterceptor<Message> { public JsonResponseWrapperInterceptor() { super(Phase.WRITE); } @Override public void handleMessage(Message message) throws Fault { Object o = message.getContent(Object.class); JsonResponseWrapper wrapper = new JsonResponseWrapper(o); message.setContent(JsonResponseWrapper.class, wrapper); } }
19
0
1
add_only
--- a/rave-components/rave-core-api/src/main/java/org/apache/rave/rest/interceptor/JsonResponseWrapperInterceptor.java +++ b/rave-components/rave-core-api/src/main/java/org/apache/rave/rest/interceptor/JsonResponseWrapperInterceptor.java @@ -1 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.apache.rave.rest.interceptor;
--- a/rave-components/rave-core-api/src/main/java/org/apache/rave/rest/interceptor/JsonResponseWrapperInterceptor.java +++ b/rave-components/rave-core-api/src/main/java/org/apache/rave/rest/interceptor/JsonResponseWrapperInterceptor.java @@ ... @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.apache.rave.rest.interceptor;
--- a/rave-components/rave-core-api/src/main/java/org/apache/rave/rest/interceptor/JsonResponseWrapperInterceptor.java +++ b/rave-components/rave-core-api/src/main/java/org/apache/rave/rest/interceptor/JsonResponseWrapperInterceptor.java @@ -1 +1,20 @@ ADD /* ADD * Licensed to the Apache Software Foundation (ASF) under one ADD * or more contributor license agreements. See the NOTICE file ADD * distributed with this work for additional information ADD * regarding copyright ownership. The ASF licenses this file ADD * to you under the Apache License, Version 2.0 (the ADD * "License"); you may not use this file except in compliance ADD * with the License. You may obtain a copy of the License at ADD * ADD * http://www.apache.org/licenses/LICENSE-2.0 ADD * ADD * Unless required by applicable law or agreed to in writing, ADD * software distributed under the License is distributed on an ADD * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ADD * KIND, either express or implied. See the License for the ADD * specific language governing permissions and limitations ADD * under the License. ADD */ ADD CON package org.apache.rave.rest.interceptor;
<<<<<<< SEARCH package org.apache.rave.rest.interceptor; ======= /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.rave.rest.interceptor; >>>>>>> REPLACE
mudalov/safe-service
6b3846f61836132e048cebba184535a694583053
src/main/java/com/mudalov/safe/impl/SafeCommands.java
java
bsd-2-clause
Remove unnecessary lock on context creation
package com.mudalov.safe.impl;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

/**
 * API Entry point, links commands and execution contexts
 *
 * User: mudalov
 * Date: 22/01/15
 * Time: 23:57
 */
public class SafeCommands {

    private static final Logger log = LoggerFactory.getLogger(SafeCommands.class);

    private static final ReentrantLock groupContextLock = new ReentrantLock();

    private static final Map<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>();

    private static final String DefaultGroup = "DefaultGroup";

    private SafeCommands() {
    }

    public static <T> CommandRef<T> create(AbstractCommand<T> command, String group) {
        GroupExecutionContext groupContext = getGroupContext(group);
        command.setContext(groupContext);
        return new CommandRef<T>(groupContext, command);
    }

    public static <T> CommandRef<T> create(AbstractCommand<T> command) {
        return create(command, DefaultGroup);
    }

    private static GroupExecutionContext getGroupContext(String groupName) {
        GroupExecutionContext context = groupContexts.get(groupName);
        if (context != null) {
            return context;
        }
        groupContextLock.lock();
        try {
            context = groupContexts.get(groupName);
            if (context != null) {
                return context;
            }
            context = new GroupExecutionContext(groupName);
            groupContexts.put(groupName, context);
            return context;
        } finally {
            groupContextLock.unlock();
        }
    }
}
package com.mudalov.safe.impl;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

/**
 * API Entry point, links commands and execution contexts
 *
 * User: mudalov
 * Date: 22/01/15
 * Time: 23:57
 */
public class SafeCommands {

    private static final ConcurrentHashMap<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>();

    private static final String DefaultGroup = "DefaultGroup";

    private SafeCommands() {
    }

    public static <T> CommandRef<T> create(AbstractCommand<T> command, String group) {
        GroupExecutionContext groupContext = getGroupContext(group);
        command.setContext(groupContext);
        return new CommandRef<T>(groupContext, command);
    }

    public static <T> CommandRef<T> create(AbstractCommand<T> command) {
        return create(command, DefaultGroup);
    }

    private static GroupExecutionContext getGroupContext(String groupName) {
        GroupExecutionContext context = groupContexts.get(groupName);
        if (context != null) {
            return context;
        }
        context = new GroupExecutionContext(groupName);
        GroupExecutionContext prevContext = groupContexts.putIfAbsent(groupName, context);
        if (prevContext == null) {
            return context;
        } else {
            return prevContext;
        }
    }
}
6
15
2
mixed
--- a/src/main/java/com/mudalov/safe/impl/SafeCommands.java +++ b/src/main/java/com/mudalov/safe/impl/SafeCommands.java @@ -18,7 +18,3 @@ - private static final Logger log = LoggerFactory.getLogger(SafeCommands.class); - - private static final ReentrantLock groupContextLock = new ReentrantLock(); - - private static final Map<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); + private static final ConcurrentHashMap<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); @@ -44,13 +40,8 @@ } - groupContextLock.lock(); - try { - context = groupContexts.get(groupName); - if (context != null) { - return context; - } - context = new GroupExecutionContext(groupName); - groupContexts.put(groupName, context); + context = new GroupExecutionContext(groupName); + GroupExecutionContext prevContext = groupContexts.putIfAbsent(groupName, context); + if (prevContext == null) { return context; - } finally { - groupContextLock.unlock(); + } else { + return prevContext; }
--- a/src/main/java/com/mudalov/safe/impl/SafeCommands.java +++ b/src/main/java/com/mudalov/safe/impl/SafeCommands.java @@ ... @@ - private static final Logger log = LoggerFactory.getLogger(SafeCommands.class); - - private static final ReentrantLock groupContextLock = new ReentrantLock(); - - private static final Map<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); + private static final ConcurrentHashMap<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); @@ ... @@ } - groupContextLock.lock(); - try { - context = groupContexts.get(groupName); - if (context != null) { - return context; - } - context = new GroupExecutionContext(groupName); - groupContexts.put(groupName, context); + context = new GroupExecutionContext(groupName); + GroupExecutionContext prevContext = groupContexts.putIfAbsent(groupName, context); + if (prevContext == null) { return context; - } finally { - groupContextLock.unlock(); + } else { + return prevContext; }
--- a/src/main/java/com/mudalov/safe/impl/SafeCommands.java +++ b/src/main/java/com/mudalov/safe/impl/SafeCommands.java @@ -18,7 +18,3 @@ CON DEL private static final Logger log = LoggerFactory.getLogger(SafeCommands.class); DEL DEL private static final ReentrantLock groupContextLock = new ReentrantLock(); DEL DEL private static final Map<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); ADD private static final ConcurrentHashMap<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); CON @@ -44,13 +40,8 @@ CON } DEL groupContextLock.lock(); DEL try { DEL context = groupContexts.get(groupName); DEL if (context != null) { DEL return context; DEL } DEL context = new GroupExecutionContext(groupName); DEL groupContexts.put(groupName, context); ADD context = new GroupExecutionContext(groupName); ADD GroupExecutionContext prevContext = groupContexts.putIfAbsent(groupName, context); ADD if (prevContext == null) { CON return context; DEL } finally { DEL groupContextLock.unlock(); ADD } else { ADD return prevContext; CON }
<<<<<<< SEARCH public class SafeCommands { private static final Logger log = LoggerFactory.getLogger(SafeCommands.class); private static final ReentrantLock groupContextLock = new ReentrantLock(); private static final Map<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); private static final String DefaultGroup = "DefaultGroup"; ======= public class SafeCommands { private static final ConcurrentHashMap<String, GroupExecutionContext> groupContexts = new ConcurrentHashMap<String, GroupExecutionContext>(); private static final String DefaultGroup = "DefaultGroup"; >>>>>>> REPLACE <<<<<<< SEARCH return context; } groupContextLock.lock(); try { context = groupContexts.get(groupName); if (context != null) { return context; } context = new GroupExecutionContext(groupName); groupContexts.put(groupName, context); return context; } finally { groupContextLock.unlock(); } } ======= return context; } context = new GroupExecutionContext(groupName); GroupExecutionContext prevContext = groupContexts.putIfAbsent(groupName, context); if (prevContext == null) { return context; } else { return prevContext; } } >>>>>>> REPLACE
praekelt/vumi-go
33598fd8baf527d63cef965eddfc90548b6c52b3
go/apps/jsbox/definition.py
python
bsd-3-clause
Remove non-unicode endpoints from the endpoint list.
import json

from go.vumitools.conversation.definition import (
    ConversationDefinitionBase, ConversationAction)


class ViewLogsAction(ConversationAction):
    action_name = 'view_logs'
    action_display_name = 'View Sandbox Logs'
    redirect_to = 'jsbox_logs'


class ConversationDefinition(ConversationDefinitionBase):
    conversation_type = 'jsbox'
    conversation_display_name = 'Javascript App'

    actions = (ViewLogsAction,)

    def configured_endpoints(self, config):
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
        try:
            js_config = json.loads(raw_js_config)
        except Exception:
            return []

        endpoints = set()
        # vumi-jssandbox-toolkit v2 endpoints
        try:
            endpoints.update(js_config["endpoints"].keys())
        except Exception:
            pass
        # vumi-jssandbox-toolkit v1 endpoints
        try:
            pool, tag = js_config["sms_tag"]
            endpoints.add("%s:%s" % (pool, tag))
        except Exception:
            pass
        return sorted(endpoints)
import json

from go.vumitools.conversation.definition import (
    ConversationDefinitionBase, ConversationAction)


class ViewLogsAction(ConversationAction):
    action_name = 'view_logs'
    action_display_name = 'View Sandbox Logs'
    redirect_to = 'jsbox_logs'


class ConversationDefinition(ConversationDefinitionBase):
    conversation_type = 'jsbox'
    conversation_display_name = 'Javascript App'

    actions = (ViewLogsAction,)

    def configured_endpoints(self, config):
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
        try:
            js_config = json.loads(raw_js_config)
        except Exception:
            return []

        # vumi-jssandbox-toolkit v2 endpoints
        try:
            v2_endpoints = list(js_config["endpoints"].keys())
        except Exception:
            v2_endpoints = []
        # vumi-jssandbox-toolkit v1 endpoints
        try:
            pool, tag = js_config["sms_tag"]
            v1_endpoints = [u"%s:%s" % (pool, tag)]
        except Exception:
            v1_endpoints = []

        endpoints = v1_endpoints + v2_endpoints
        endpoints = [ep for ep in endpoints if isinstance(ep, unicode)]
        return sorted(set(endpoints))
8
6
2
mixed
--- a/go/apps/jsbox/definition.py +++ b/go/apps/jsbox/definition.py @@ -26,8 +26,7 @@ - endpoints = set() # vumi-jssandbox-toolkit v2 endpoints try: - endpoints.update(js_config["endpoints"].keys()) + v2_endpoints = list(js_config["endpoints"].keys()) except Exception: - pass + v2_endpoints = [] # vumi-jssandbox-toolkit v1 endpoints @@ -35,5 +34,8 @@ pool, tag = js_config["sms_tag"] - endpoints.add("%s:%s" % (pool, tag)) + v1_endpoints = [u"%s:%s" % (pool, tag)] except Exception: - pass - return sorted(endpoints) + v1_endpoints = [] + + endpoints = v1_endpoints + v2_endpoints + endpoints = [ep for ep in endpoints if isinstance(ep, unicode)] + return sorted(set(endpoints))
--- a/go/apps/jsbox/definition.py +++ b/go/apps/jsbox/definition.py @@ ... @@ - endpoints = set() # vumi-jssandbox-toolkit v2 endpoints try: - endpoints.update(js_config["endpoints"].keys()) + v2_endpoints = list(js_config["endpoints"].keys()) except Exception: - pass + v2_endpoints = [] # vumi-jssandbox-toolkit v1 endpoints @@ ... @@ pool, tag = js_config["sms_tag"] - endpoints.add("%s:%s" % (pool, tag)) + v1_endpoints = [u"%s:%s" % (pool, tag)] except Exception: - pass - return sorted(endpoints) + v1_endpoints = [] + + endpoints = v1_endpoints + v2_endpoints + endpoints = [ep for ep in endpoints if isinstance(ep, unicode)] + return sorted(set(endpoints))
--- a/go/apps/jsbox/definition.py +++ b/go/apps/jsbox/definition.py @@ -26,8 +26,7 @@ CON DEL endpoints = set() CON # vumi-jssandbox-toolkit v2 endpoints CON try: DEL endpoints.update(js_config["endpoints"].keys()) ADD v2_endpoints = list(js_config["endpoints"].keys()) CON except Exception: DEL pass ADD v2_endpoints = [] CON # vumi-jssandbox-toolkit v1 endpoints @@ -35,5 +34,8 @@ CON pool, tag = js_config["sms_tag"] DEL endpoints.add("%s:%s" % (pool, tag)) ADD v1_endpoints = [u"%s:%s" % (pool, tag)] CON except Exception: DEL pass DEL return sorted(endpoints) ADD v1_endpoints = [] ADD ADD endpoints = v1_endpoints + v2_endpoints ADD endpoints = [ep for ep in endpoints if isinstance(ep, unicode)] ADD return sorted(set(endpoints))
<<<<<<< SEARCH return [] endpoints = set() # vumi-jssandbox-toolkit v2 endpoints try: endpoints.update(js_config["endpoints"].keys()) except Exception: pass # vumi-jssandbox-toolkit v1 endpoints try: pool, tag = js_config["sms_tag"] endpoints.add("%s:%s" % (pool, tag)) except Exception: pass return sorted(endpoints) ======= return [] # vumi-jssandbox-toolkit v2 endpoints try: v2_endpoints = list(js_config["endpoints"].keys()) except Exception: v2_endpoints = [] # vumi-jssandbox-toolkit v1 endpoints try: pool, tag = js_config["sms_tag"] v1_endpoints = [u"%s:%s" % (pool, tag)] except Exception: v1_endpoints = [] endpoints = v1_endpoints + v2_endpoints endpoints = [ep for ep in endpoints if isinstance(ep, unicode)] return sorted(set(endpoints)) >>>>>>> REPLACE
parzonka/prm4j
d56b16212167742937ab5b18508f75248830179b
src/main/java/prm4j/indexing/realtime/LowLevelBinding.java
java
epl-1.0
Remove todo and add comment
/*
 * Copyright (c) 2012 Mateusz Parzonka, Eric Bodden
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 * Mateusz Parzonka - initial API and implementation
 */
package prm4j.indexing.realtime;

import java.lang.ref.WeakReference;

import prm4j.indexing.map.MinimalMapEntry;

/**
 * A binding used by optimized indexing strategies.
 */
public interface LowLevelBinding extends prm4j.api.Binding, MinimalMapEntry<Object, LowLevelBinding>{

    /**
     * Releases all resources used in the indexing data structure and/or notifies monitors about unreachability of the
     * parameter object. Amount of released resources can vary strongly with the implementation.
     */
    void release();

    /**
     * Register a map where this binding is used.
     *
     * @param mapReference
     */
    void registerNode(WeakReference<Node> nodeReference); // TODO resource registration

    boolean isDisabled();

    void setDisabled(boolean disable);

    long getTimestamp();

    void setTimestamp(long timestamp);

}
/*
 * Copyright (c) 2012 Mateusz Parzonka, Eric Bodden
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 * Mateusz Parzonka - initial API and implementation
 */
package prm4j.indexing.realtime;

import java.lang.ref.WeakReference;

import prm4j.indexing.map.MinimalMapEntry;

/**
 * A binding used by optimized indexing strategies.
 */
public interface LowLevelBinding extends prm4j.api.Binding, MinimalMapEntry<Object, LowLevelBinding>{

    /**
     * Releases all resources used in the indexing data structure and/or notifies monitors about unreachability of the
     * parameter object. Amount of released resources can vary strongly with the implementation.
     */
    void release();

    /**
     * Register a map which uses this binding as key.
     *
     * @param nodeRef
     */
    void registerNode(WeakReference<Node> nodeRef);

    boolean isDisabled();

    void setDisabled(boolean disable);

    long getTimestamp();

    void setTimestamp(long timestamp);

}
3
3
1
mixed
--- a/src/main/java/prm4j/indexing/realtime/LowLevelBinding.java +++ b/src/main/java/prm4j/indexing/realtime/LowLevelBinding.java @@ -28,7 +28,7 @@ /** - * Register a map where this binding is used. + * Register a map which uses this binding as key. * - * @param mapReference + * @param nodeRef */ - void registerNode(WeakReference<Node> nodeReference); // TODO resource registration + void registerNode(WeakReference<Node> nodeRef);
--- a/src/main/java/prm4j/indexing/realtime/LowLevelBinding.java +++ b/src/main/java/prm4j/indexing/realtime/LowLevelBinding.java @@ ... @@ /** - * Register a map where this binding is used. + * Register a map which uses this binding as key. * - * @param mapReference + * @param nodeRef */ - void registerNode(WeakReference<Node> nodeReference); // TODO resource registration + void registerNode(WeakReference<Node> nodeRef);
--- a/src/main/java/prm4j/indexing/realtime/LowLevelBinding.java +++ b/src/main/java/prm4j/indexing/realtime/LowLevelBinding.java @@ -28,7 +28,7 @@ CON /** DEL * Register a map where this binding is used. ADD * Register a map which uses this binding as key. CON * DEL * @param mapReference ADD * @param nodeRef CON */ DEL void registerNode(WeakReference<Node> nodeReference); // TODO resource registration ADD void registerNode(WeakReference<Node> nodeRef); CON
<<<<<<< SEARCH /** * Register a map where this binding is used. * * @param mapReference */ void registerNode(WeakReference<Node> nodeReference); // TODO resource registration boolean isDisabled(); ======= /** * Register a map which uses this binding as key. * * @param nodeRef */ void registerNode(WeakReference<Node> nodeRef); boolean isDisabled(); >>>>>>> REPLACE
songzhw/AndroidTestDemo
46defa9ca3f361158a463276425601aa9f6c242a
IntoJFace/src/cn/six/uav/util/CommandRunner.java
java
apache-2.0
Fix the bug : the running of two processes are not synchronous
package cn.six.uav.util;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by songzhw on 2016/3/10.
 */
public class CommandRunner {

    private List<String> outputs;

    public void run(List<String> cmds) throws Exception {
        outputs = new ArrayList<>();
        ProcessBuilder procBuilder = new ProcessBuilder(cmds)
                .redirectErrorStream(true);
        final Process proc = procBuilder.start();
        new Thread(new Runnable() {
            @Override
            public void run() {
                String line;
                outputs.clear();
                try {
                    InputStream is = proc.getInputStream();
                    BufferedReader reader = new BufferedReader(new InputStreamReader(is));
                    while( (line = reader.readLine()) != null){
                        outputs.add(line);
                    }

                    for(String aline : outputs){
                        System.out.println("szw line = "+aline);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        })
        .start();

    }
}
package cn.six.uav.util;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by songzhw on 2016/3/10.
 */
public class CommandRunner {

    private List<String> outputs;

    public int run(List<String> cmds) throws Exception {
        outputs = new ArrayList<>();
        ProcessBuilder procBuilder = new ProcessBuilder(cmds)
                .redirectErrorStream(true);
        final Process proc = procBuilder.start();
        new Thread(new Runnable() {
            @Override
            public void run() {
                String line;
                outputs.clear();
                try {
                    InputStream is = proc.getInputStream();
                    BufferedReader reader = new BufferedReader(new InputStreamReader(is));
                    while( (line = reader.readLine()) != null){
                        outputs.add(line);
                    }

                    for(String aline : outputs){
                        System.out.println("szw line = "+aline);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        })
        .start();

        return proc.waitFor();
    }
}
2
1
2
mixed
--- a/IntoJFace/src/cn/six/uav/util/CommandRunner.java +++ b/IntoJFace/src/cn/six/uav/util/CommandRunner.java @@ -16,3 +16,3 @@ - public void run(List<String> cmds) throws Exception { + public int run(List<String> cmds) throws Exception { outputs = new ArrayList<>(); @@ -43,2 +43,3 @@ + return proc.waitFor(); }
--- a/IntoJFace/src/cn/six/uav/util/CommandRunner.java +++ b/IntoJFace/src/cn/six/uav/util/CommandRunner.java @@ ... @@ - public void run(List<String> cmds) throws Exception { + public int run(List<String> cmds) throws Exception { outputs = new ArrayList<>(); @@ ... @@ + return proc.waitFor(); }
--- a/IntoJFace/src/cn/six/uav/util/CommandRunner.java +++ b/IntoJFace/src/cn/six/uav/util/CommandRunner.java @@ -16,3 +16,3 @@ CON DEL public void run(List<String> cmds) throws Exception { ADD public int run(List<String> cmds) throws Exception { CON outputs = new ArrayList<>(); @@ -43,2 +43,3 @@ CON ADD return proc.waitFor(); CON }
<<<<<<< SEARCH private List<String> outputs; public void run(List<String> cmds) throws Exception { outputs = new ArrayList<>(); ProcessBuilder procBuilder = new ProcessBuilder(cmds) ======= private List<String> outputs; public int run(List<String> cmds) throws Exception { outputs = new ArrayList<>(); ProcessBuilder procBuilder = new ProcessBuilder(cmds) >>>>>>> REPLACE <<<<<<< SEARCH }) .start(); } } ======= }) .start(); return proc.waitFor(); } } >>>>>>> REPLACE
remigourdon/sound-editors
16b164dce67ead1336c9042daa9818617abf0e42
framework/Player.java
java
mit
Add methods to add and remove sounds from the player.
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;

import java.util.ArrayList;

import framework.Sound;

/**
 * Handles the playing of the sounds inside the list.
 */
public class Player {
    public Player() {
        final AudioFormat af = new AudioFormat(
            SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, true);
        try {
            line = AudioSystem.getSourceDataLine(af);
            line.open();
        } catch(Exception e) {
            e.printStackTrace();
        }
        list = new ArrayList<Sound>();
    }

    private final SourceDataLine line;
    private ArrayList<Sound> list;

    // Constants
    private final int SAMPLE_RATE = 44100; // CD quality audio
    private final int MAX_16_BITS = Short.MAX_VALUE;
    private final int BITS_PER_SAMPLE = 16;
}
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;

import java.util.ArrayList;

import framework.Sound;

/**
 * Handles the playing of the sounds inside the list.
 */
public class Player {
    public Player() {
        final AudioFormat af = new AudioFormat(
            SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, true);
        try {
            line = AudioSystem.getSourceDataLine(af);
            line.open();
        } catch(Exception e) {
            e.printStackTrace();
        }
        list = new ArrayList<Sound>();
    }

    /**
     * Add the specified Sound to the list.
     * @param s the sound to be added
     */
    public void addSound(Sound s) {
        list.add(s);
    }

    /**
     * Remove the specified Sound from the list.
     * @param s the Sound to be removed
     */
    public void removeSound(Sound s) {
        list.remove(s);
    }

    private final SourceDataLine line;
    private ArrayList<Sound> list;

    // Constants
    private final int SAMPLE_RATE = 44100; // CD quality audio
    private final int MAX_16_BITS = Short.MAX_VALUE;
    private final int BITS_PER_SAMPLE = 16;
}
16
0
1
add_only
--- a/framework/Player.java +++ b/framework/Player.java @@ -24,2 +24,18 @@ + /** + * Add the specified Sound to the list. + * @param s the sound to be added + */ + public void addSound(Sound s) { + list.add(s); + } + + /** + * Remove the specified Sound from the list. + * @param s the Sound to be removed + */ + public void removeSound(Sound s) { + list.remove(s); + } + private final SourceDataLine line;
--- a/framework/Player.java +++ b/framework/Player.java @@ ... @@ + /** + * Add the specified Sound to the list. + * @param s the sound to be added + */ + public void addSound(Sound s) { + list.add(s); + } + + /** + * Remove the specified Sound from the list. + * @param s the Sound to be removed + */ + public void removeSound(Sound s) { + list.remove(s); + } + private final SourceDataLine line;
--- a/framework/Player.java +++ b/framework/Player.java @@ -24,2 +24,18 @@ CON ADD /** ADD * Add the specified Sound to the list. ADD * @param s the sound to be added ADD */ ADD public void addSound(Sound s) { ADD list.add(s); ADD } ADD ADD /** ADD * Remove the specified Sound from the list. ADD * @param s the Sound to be removed ADD */ ADD public void removeSound(Sound s) { ADD list.remove(s); ADD } ADD CON private final SourceDataLine line;
<<<<<<< SEARCH } private final SourceDataLine line; private ArrayList<Sound> list; ======= } /** * Add the specified Sound to the list. * @param s the sound to be added */ public void addSound(Sound s) { list.add(s); } /** * Remove the specified Sound from the list. * @param s the Sound to be removed */ public void removeSound(Sound s) { list.remove(s); } private final SourceDataLine line; private ArrayList<Sound> list; >>>>>>> REPLACE
nikolay-radkov/EBudgie
c20354f89ab7e4c020ab2d38e4bcb56eb2154e62
android/app/src/main/java/com/ebudgie/MainApplication.java
java
mit
Increase the AsyncStorage capacity from 5mb to 100mb
package com.ebudgie;

import android.app.Application;
import android.util.Log;

import com.facebook.react.ReactApplication;
import io.underscope.react.fbak.RNAccountKitPackage;
import com.cboy.rn.splashscreen.SplashScreenReactPackage;
import com.oblador.vectoricons.VectorIconsPackage;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.shell.MainReactPackage;
import com.facebook.soloader.SoLoader;

import java.util.Arrays;
import java.util.List;

public class MainApplication extends Application implements ReactApplication {

  private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) {
    @Override
    protected boolean getUseDeveloperSupport() {
      return BuildConfig.DEBUG;
    }

    @Override
    protected List<ReactPackage> getPackages() {
      return Arrays.<ReactPackage>asList(
          new MainReactPackage(),
          new RNAccountKitPackage(),
          new SplashScreenReactPackage(),
          new VectorIconsPackage()
      );
    }
  };

  @Override
  public ReactNativeHost getReactNativeHost() {
    return mReactNativeHost;
  }

  @Override
  public void onCreate() {
    super.onCreate();
    SoLoader.init(this, /* native exopackage */ false);
  }
}
package com.ebudgie;

import android.app.Application;
import android.util.Log;

import com.facebook.react.ReactApplication;
import io.underscope.react.fbak.RNAccountKitPackage;
import com.cboy.rn.splashscreen.SplashScreenReactPackage;
import com.oblador.vectoricons.VectorIconsPackage;
import com.facebook.react.modules.storage.ReactDatabaseSupplier;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.shell.MainReactPackage;
import com.facebook.soloader.SoLoader;

import java.util.Arrays;
import java.util.List;

public class MainApplication extends Application implements ReactApplication {

  private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) {
    @Override
    protected boolean getUseDeveloperSupport() {
      return BuildConfig.DEBUG;
    }

    @Override
    protected List<ReactPackage> getPackages() {
      long size = 100L * 1024L * 1024L; // 100 MB
      ReactDatabaseSupplier.getInstance(getApplicationContext()).setMaximumSize(size);

      return Arrays.<ReactPackage>asList(
          new MainReactPackage(),
          new RNAccountKitPackage(),
          new SplashScreenReactPackage(),
          new VectorIconsPackage()
      );
    }
  };

  @Override
  public ReactNativeHost getReactNativeHost() {
    return mReactNativeHost;
  }

  @Override
  public void onCreate() {
    super.onCreate();
    SoLoader.init(this, /* native exopackage */ false);
  }
}
4
0
2
add_only
--- a/android/app/src/main/java/com/ebudgie/MainApplication.java +++ b/android/app/src/main/java/com/ebudgie/MainApplication.java @@ -9,2 +9,3 @@ import com.oblador.vectoricons.VectorIconsPackage; +import com.facebook.react.modules.storage.ReactDatabaseSupplier; import com.facebook.react.ReactInstanceManager; @@ -28,2 +29,5 @@ protected List<ReactPackage> getPackages() { + long size = 100L * 1024L * 1024L; // 100 MB + ReactDatabaseSupplier.getInstance(getApplicationContext()).setMaximumSize(size); + return Arrays.<ReactPackage>asList(
--- a/android/app/src/main/java/com/ebudgie/MainApplication.java +++ b/android/app/src/main/java/com/ebudgie/MainApplication.java @@ ... @@ import com.oblador.vectoricons.VectorIconsPackage; +import com.facebook.react.modules.storage.ReactDatabaseSupplier; import com.facebook.react.ReactInstanceManager; @@ ... @@ protected List<ReactPackage> getPackages() { + long size = 100L * 1024L * 1024L; // 100 MB + ReactDatabaseSupplier.getInstance(getApplicationContext()).setMaximumSize(size); + return Arrays.<ReactPackage>asList(
--- a/android/app/src/main/java/com/ebudgie/MainApplication.java +++ b/android/app/src/main/java/com/ebudgie/MainApplication.java @@ -9,2 +9,3 @@ CON import com.oblador.vectoricons.VectorIconsPackage; ADD import com.facebook.react.modules.storage.ReactDatabaseSupplier; CON import com.facebook.react.ReactInstanceManager; @@ -28,2 +29,5 @@ CON protected List<ReactPackage> getPackages() { ADD long size = 100L * 1024L * 1024L; // 100 MB ADD ReactDatabaseSupplier.getInstance(getApplicationContext()).setMaximumSize(size); ADD CON return Arrays.<ReactPackage>asList(
<<<<<<< SEARCH import com.cboy.rn.splashscreen.SplashScreenReactPackage; import com.oblador.vectoricons.VectorIconsPackage; import com.facebook.react.ReactInstanceManager; import com.facebook.react.ReactNativeHost; ======= import com.cboy.rn.splashscreen.SplashScreenReactPackage; import com.oblador.vectoricons.VectorIconsPackage; import com.facebook.react.modules.storage.ReactDatabaseSupplier; import com.facebook.react.ReactInstanceManager; import com.facebook.react.ReactNativeHost; >>>>>>> REPLACE <<<<<<< SEARCH @Override protected List<ReactPackage> getPackages() { return Arrays.<ReactPackage>asList( new MainReactPackage(), ======= @Override protected List<ReactPackage> getPackages() { long size = 100L * 1024L * 1024L; // 100 MB ReactDatabaseSupplier.getInstance(getApplicationContext()).setMaximumSize(size); return Arrays.<ReactPackage>asList( new MainReactPackage(), >>>>>>> REPLACE
kamatama41/embulk-test-helpers
446cc75abeed059d285595c5241186b59202f53d
build.gradle.kts
kotlin
mit
Cut off support for Java 7
import com.github.kamatama41.gradle.gitrelease.GitReleaseExtension

buildscript {
    val kotlinVersion = "1.2.31"
    extra["kotlinVersion"] = kotlinVersion
    repositories {
        jcenter()
        maven { setUrl("http://kamatama41.github.com/maven-repository/repository") }
    }
    dependencies {
        classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlinVersion")
        classpath("com.github.kamatama41:gradle-git-release-plugin:0.2.0")
    }
}

apply {
    plugin("idea")
    plugin("kotlin")
    plugin("com.github.kamatama41.git-release")
}

repositories {
    jcenter()
}

configure<JavaPluginConvention> {
    sourceCompatibility = JavaVersion.VERSION_1_7
    targetCompatibility = JavaVersion.VERSION_1_7
}

val kotlinVersion: String by extra

dependencies {
    compile("org.jetbrains.kotlin:kotlin-stdlib:$kotlinVersion")
    compile("org.embulk:embulk-standards:0.8.18")
    compile("org.embulk:embulk-test:0.8.18")
    testCompile("junit:junit:4.12")
}

configure<GitReleaseExtension> {
    groupId = "com.github.kamatama41"
    artifactId = "embulk-test-helpers"
    repoUri = "[email protected]:kamatama41/maven-repository.git"
    repoDir = file("${System.getProperty("user.home")}/gh-maven-repository")
}
import com.github.kamatama41.gradle.gitrelease.GitReleaseExtension

buildscript {
    val kotlinVersion = "1.2.31"
    extra["kotlinVersion"] = kotlinVersion
    repositories {
        jcenter()
        maven { setUrl("http://kamatama41.github.com/maven-repository/repository") }
    }
    dependencies {
        classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlinVersion")
        classpath("com.github.kamatama41:gradle-git-release-plugin:0.2.0")
    }
}

apply {
    plugin("idea")
    plugin("kotlin")
    plugin("com.github.kamatama41.git-release")
}

repositories {
    jcenter()
}

configure<JavaPluginConvention> {
    sourceCompatibility = JavaVersion.VERSION_1_8
    targetCompatibility = JavaVersion.VERSION_1_8
}

val kotlinVersion: String by extra

dependencies {
    compile("org.jetbrains.kotlin:kotlin-stdlib:$kotlinVersion")
    compile("org.embulk:embulk-standards:0.8.18")
    compile("org.embulk:embulk-test:0.8.18")
    testCompile("junit:junit:4.12")
}

configure<GitReleaseExtension> {
    groupId = "com.github.kamatama41"
    artifactId = "embulk-test-helpers"
    repoUri = "[email protected]:kamatama41/maven-repository.git"
    repoDir = file("${System.getProperty("user.home")}/gh-maven-repository")
}
2
2
1
mixed
--- a/build.gradle.kts +++ b/build.gradle.kts @@ -26,4 +26,4 @@ configure<JavaPluginConvention> { - sourceCompatibility = JavaVersion.VERSION_1_7 - targetCompatibility = JavaVersion.VERSION_1_7 + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 }
--- a/build.gradle.kts +++ b/build.gradle.kts @@ ... @@ configure<JavaPluginConvention> { - sourceCompatibility = JavaVersion.VERSION_1_7 - targetCompatibility = JavaVersion.VERSION_1_7 + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 }
--- a/build.gradle.kts +++ b/build.gradle.kts @@ -26,4 +26,4 @@ CON configure<JavaPluginConvention> { DEL sourceCompatibility = JavaVersion.VERSION_1_7 DEL targetCompatibility = JavaVersion.VERSION_1_7 ADD sourceCompatibility = JavaVersion.VERSION_1_8 ADD targetCompatibility = JavaVersion.VERSION_1_8 CON }
<<<<<<< SEARCH configure<JavaPluginConvention> { sourceCompatibility = JavaVersion.VERSION_1_7 targetCompatibility = JavaVersion.VERSION_1_7 } ======= configure<JavaPluginConvention> { sourceCompatibility = JavaVersion.VERSION_1_8 targetCompatibility = JavaVersion.VERSION_1_8 } >>>>>>> REPLACE
nafarlee/thoughtfuck
f8a5b27ca6932cb433643fbe4971cac2e6a00667
src/program.rs
rust
mit
Adjust depth when jump commands are seen
use command::Command;
use vm::VM;

pub struct Program {
    instructions : Vec<Command>,
    instruction_pointer: Option<usize>,
    is_seeking: bool,
    current_depth: u64,
    goal_depth: Option<u64>,
}

impl Program {
    pub fn new () -> Program {
        Program {
            instructions: Vec::new(),
            instruction_pointer: None,
            is_seeking: false,
            current_depth: 0,
            goal_depth: None,
        }
    }

    pub fn append(&mut self, instructions: &[Command]) {
        self.instructions.extend(instructions.iter().cloned());

        if self.instruction_pointer.is_none() {
            self.instruction_pointer = Some(0);
        }
    }

    pub fn execute(&mut self, vm: &mut VM) {
        match self.instruction_pointer {
            None => {},
            Some(mut index) => {

                while index < self.instructions.len() {
                    let command = self.instructions[index];
                    vm.apply(command);
                    index = index + 1;
                }
                self.instruction_pointer = Some(index);
            }
        }
    }
}
use command::Command;
use vm::VM;

pub struct Program {
    instructions : Vec<Command>,
    instruction_pointer: Option<usize>,
    is_seeking: bool,
    current_depth: u64,
    goal_depth: Option<u64>,
}

impl Program {
    pub fn new () -> Program {
        Program {
            instructions: Vec::new(),
            instruction_pointer: None,
            is_seeking: false,
            current_depth: 0,
            goal_depth: None,
        }
    }

    pub fn append(&mut self, instructions: &[Command]) {
        self.instructions.extend(instructions.iter().cloned());

        if self.instruction_pointer.is_none() {
            self.instruction_pointer = Some(0);
        }
    }

    pub fn execute(&mut self, vm: &mut VM) {
        match self.instruction_pointer {
            None => {},
            Some(mut index) => {

                while index < self.instructions.len() {
                    let command = self.instructions[index];
                    if command == Command::JumpForward { self.current_depth = self.current_depth + 1}
                    if command == Command::JumpBackward { self.current_depth = self.current_depth - 1}
                    vm.apply(command);
                    index = index + 1;
                }
                self.instruction_pointer = Some(index);
            }
        }
    }
}
2
0
1
add_only
--- a/src/program.rs +++ b/src/program.rs @@ -37,2 +37,4 @@ let command = self.instructions[index]; + if command == Command::JumpForward { self.current_depth = self.current_depth + 1} + if command == Command::JumpBackward { self.current_depth = self.current_depth - 1} vm.apply(command);
--- a/src/program.rs +++ b/src/program.rs @@ ... @@ let command = self.instructions[index]; + if command == Command::JumpForward { self.current_depth = self.current_depth + 1} + if command == Command::JumpBackward { self.current_depth = self.current_depth - 1} vm.apply(command);
--- a/src/program.rs +++ b/src/program.rs @@ -37,2 +37,4 @@ CON let command = self.instructions[index]; ADD if command == Command::JumpForward { self.current_depth = self.current_depth + 1} ADD if command == Command::JumpBackward { self.current_depth = self.current_depth - 1} CON vm.apply(command);
<<<<<<< SEARCH while index < self.instructions.len() { let command = self.instructions[index]; vm.apply(command); index = index + 1; ======= while index < self.instructions.len() { let command = self.instructions[index]; if command == Command::JumpForward { self.current_depth = self.current_depth + 1} if command == Command::JumpBackward { self.current_depth = self.current_depth - 1} vm.apply(command); index = index + 1; >>>>>>> REPLACE
Acizza/bcnotif
f85e32852cd3265bd515853c2cd71a42af4cce22
src/util/mod.rs
rust
agpl-3.0
Return error when a file is empty
#[macro_use]
pub mod error;
use std::path::{Path, PathBuf};
use std::fs::File;
use std::io::{self, Read};

pub fn local_path(path: &str) -> io::Result<PathBuf> {
    let mut base = ::std::env::current_exe()?;
    base.pop();
    base.push(path);

    Ok(base)
}

/// Creates a file only if it doesn't already exist and returns whether it was created or not.
pub fn touch_file(path: &Path) -> io::Result<bool> {
    let exists = path.exists();
    if !exists {
        File::create(path)?;
    }

    Ok(exists)
}

pub fn verify_local_file(path: &str) -> io::Result<PathBuf> {
    let path = local_path(path)?;
    touch_file(&path)?;

    Ok(path)
}

pub fn read_file(path: &Path) -> io::Result<String> {
    let mut file = File::open(path)?;
    let mut buffer = String::new();
    file.read_to_string(&mut buffer)?;

    Ok(buffer)
}

pub fn lerp(v0: f32, v1: f32, t: f32) -> f32 {
    (1. - t) * v0 + t * v1
}
#[macro_use]
pub mod error;
use std::path::{Path, PathBuf};
use std::fs::File;
use std::io::{self, Read, Error, ErrorKind};

pub fn local_path(path: &str) -> io::Result<PathBuf> {
    let mut base = ::std::env::current_exe()?;
    base.pop();
    base.push(path);

    Ok(base)
}

/// Creates a file only if it doesn't already exist and returns whether it was created or not.
pub fn touch_file(path: &Path) -> io::Result<bool> {
    let exists = path.exists();
    if !exists {
        File::create(path)?;
    }

    Ok(exists)
}

pub fn verify_local_file(path: &str) -> io::Result<PathBuf> {
    let path = local_path(path)?;
    touch_file(&path)?;

    Ok(path)
}

pub fn read_file(path: &Path) -> io::Result<String> {
    let mut file = File::open(path)?;
    let mut buffer = String::new();
    file.read_to_string(&mut buffer)?;

    if buffer.len() > 0 {
        Ok(buffer)
    } else {
        let path = path.to_str().ok_or(Error::new(
            ErrorKind::InvalidData,
            "util::read_file(): malformed path"))?;

        Err(Error::new(ErrorKind::InvalidData, format!("util::read_file(): {} is empty", path)))
    }
}

pub fn lerp(v0: f32, v1: f32, t: f32) -> f32 {
    (1. - t) * v0 + t * v1
}
10
2
2
mixed
--- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -4,3 +4,3 @@ use std::fs::File; -use std::io::{self, Read}; +use std::io::{self, Read, Error, ErrorKind}; @@ -36,3 +36,11 @@ - Ok(buffer) + if buffer.len() > 0 { + Ok(buffer) + } else { + let path = path.to_str().ok_or(Error::new( + ErrorKind::InvalidData, + "util::read_file(): malformed path"))?; + + Err(Error::new(ErrorKind::InvalidData, format!("util::read_file(): {} is empty", path))) + } }
--- a/src/util/mod.rs +++ b/src/util/mod.rs @@ ... @@ use std::fs::File; -use std::io::{self, Read}; +use std::io::{self, Read, Error, ErrorKind}; @@ ... @@ - Ok(buffer) + if buffer.len() > 0 { + Ok(buffer) + } else { + let path = path.to_str().ok_or(Error::new( + ErrorKind::InvalidData, + "util::read_file(): malformed path"))?; + + Err(Error::new(ErrorKind::InvalidData, format!("util::read_file(): {} is empty", path))) + } }
--- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -4,3 +4,3 @@ CON use std::fs::File; DEL use std::io::{self, Read}; ADD use std::io::{self, Read, Error, ErrorKind}; CON @@ -36,3 +36,11 @@ CON DEL Ok(buffer) ADD if buffer.len() > 0 { ADD Ok(buffer) ADD } else { ADD let path = path.to_str().ok_or(Error::new( ADD ErrorKind::InvalidData, ADD "util::read_file(): malformed path"))?; ADD ADD Err(Error::new(ErrorKind::InvalidData, format!("util::read_file(): {} is empty", path))) ADD } CON }
<<<<<<< SEARCH use std::path::{Path, PathBuf}; use std::fs::File; use std::io::{self, Read}; pub fn local_path(path: &str) -> io::Result<PathBuf> { ======= use std::path::{Path, PathBuf}; use std::fs::File; use std::io::{self, Read, Error, ErrorKind}; pub fn local_path(path: &str) -> io::Result<PathBuf> { >>>>>>> REPLACE <<<<<<< SEARCH file.read_to_string(&mut buffer)?; Ok(buffer) } ======= file.read_to_string(&mut buffer)?; if buffer.len() > 0 { Ok(buffer) } else { let path = path.to_str().ok_or(Error::new( ErrorKind::InvalidData, "util::read_file(): malformed path"))?; Err(Error::new(ErrorKind::InvalidData, format!("util::read_file(): {} is empty", path))) } } >>>>>>> REPLACE
adamsea/recipes-api
6732c4705caf7816423bc42c6f9b7db9079e61d6
routes/login/index.js
javascript
mit
Add section 6.1 code to provide login for users
var express = require('express');
var router = express.Router();

// Login a user with basic auth
router.post('/', function (req, res, next) {});

module.exports = router;
var express = require('express');
var router = express.Router();
var auth = require('../../utils/auth');
var _ = require('lodash');
var JsonDB = require('node-json-db');
var crypto = require('crypto');

// Login a user with basic auth
router.post('/', auth.basic, function (req, res, next) {
  var db = new JsonDB('db', false, true);
  var users = db.getData('/users');

  function sendError() {
    res.status(500).json({
      success: false,
      message: 'Unable to authenticate user.'
    });
  }

  // Find the auth user to update last login
  var i = _.findIndex(users, {username: req.user.username});
  crypto.randomBytes(16, function(err, buf) {
    if (!err) {
      var ret = _.attempt(db.push.bind(db), '/users['+i+']', {
        last_login: _.now(),
        token: buf.toString('hex')
      }, false);

      if (!_.isError(ret)) {
        res.json({
          success: true,
          message: 'Successfully authenticated user.',
          token: buf.toString('hex')
        });
      }
      else {
        sendError();
      }
    }
    else {
      sendError();
    }
  });
});

module.exports = router;
40
1
1
mixed
--- a/routes/login/index.js +++ b/routes/login/index.js @@ -2,5 +2,44 @@ var router = express.Router(); +var auth = require('../../utils/auth'); +var _ = require('lodash'); +var JsonDB = require('node-json-db'); +var crypto = require('crypto'); // Login a user with basic auth -router.post('/', function (req, res, next) {}); +router.post('/', auth.basic, function (req, res, next) { + var db = new JsonDB('db', false, true); + var users = db.getData('/users'); + + function sendError() { + res.status(500).json({ + success: false, + message: 'Unable to authenticate user.' + }); + } + + // Find the auth user to update last login + var i = _.findIndex(users, {username: req.user.username}); + crypto.randomBytes(16, function(err, buf) { + if (!err) { + var ret = _.attempt(db.push.bind(db), '/users['+i+']', { + last_login: _.now(), + token: buf.toString('hex') + }, false); + + if (!_.isError(ret)) { + res.json({ + success: true, + message: 'Successfully authenticated user.', + token: buf.toString('hex') + }); + } + else { + sendError(); + } + } + else { + sendError(); + } + }); +});
--- a/routes/login/index.js +++ b/routes/login/index.js @@ ... @@ var router = express.Router(); +var auth = require('../../utils/auth'); +var _ = require('lodash'); +var JsonDB = require('node-json-db'); +var crypto = require('crypto'); // Login a user with basic auth -router.post('/', function (req, res, next) {}); +router.post('/', auth.basic, function (req, res, next) { + var db = new JsonDB('db', false, true); + var users = db.getData('/users'); + + function sendError() { + res.status(500).json({ + success: false, + message: 'Unable to authenticate user.' + }); + } + + // Find the auth user to update last login + var i = _.findIndex(users, {username: req.user.username}); + crypto.randomBytes(16, function(err, buf) { + if (!err) { + var ret = _.attempt(db.push.bind(db), '/users['+i+']', { + last_login: _.now(), + token: buf.toString('hex') + }, false); + + if (!_.isError(ret)) { + res.json({ + success: true, + message: 'Successfully authenticated user.', + token: buf.toString('hex') + }); + } + else { + sendError(); + } + } + else { + sendError(); + } + }); +});
--- a/routes/login/index.js +++ b/routes/login/index.js @@ -2,5 +2,44 @@ CON var router = express.Router(); ADD var auth = require('../../utils/auth'); ADD var _ = require('lodash'); ADD var JsonDB = require('node-json-db'); ADD var crypto = require('crypto'); CON CON // Login a user with basic auth DEL router.post('/', function (req, res, next) {}); ADD router.post('/', auth.basic, function (req, res, next) { ADD var db = new JsonDB('db', false, true); ADD var users = db.getData('/users'); ADD ADD function sendError() { ADD res.status(500).json({ ADD success: false, ADD message: 'Unable to authenticate user.' ADD }); ADD } ADD ADD // Find the auth user to update last login ADD var i = _.findIndex(users, {username: req.user.username}); ADD crypto.randomBytes(16, function(err, buf) { ADD if (!err) { ADD var ret = _.attempt(db.push.bind(db), '/users['+i+']', { ADD last_login: _.now(), ADD token: buf.toString('hex') ADD }, false); ADD ADD if (!_.isError(ret)) { ADD res.json({ ADD success: true, ADD message: 'Successfully authenticated user.', ADD token: buf.toString('hex') ADD }); ADD } ADD else { ADD sendError(); ADD } ADD } ADD else { ADD sendError(); ADD } ADD }); ADD }); CON
<<<<<<< SEARCH var express = require('express'); var router = express.Router(); // Login a user with basic auth router.post('/', function (req, res, next) {}); module.exports = router; ======= var express = require('express'); var router = express.Router(); var auth = require('../../utils/auth'); var _ = require('lodash'); var JsonDB = require('node-json-db'); var crypto = require('crypto'); // Login a user with basic auth router.post('/', auth.basic, function (req, res, next) { var db = new JsonDB('db', false, true); var users = db.getData('/users'); function sendError() { res.status(500).json({ success: false, message: 'Unable to authenticate user.' }); } // Find the auth user to update last login var i = _.findIndex(users, {username: req.user.username}); crypto.randomBytes(16, function(err, buf) { if (!err) { var ret = _.attempt(db.push.bind(db), '/users['+i+']', { last_login: _.now(), token: buf.toString('hex') }, false); if (!_.isError(ret)) { res.json({ success: true, message: 'Successfully authenticated user.', token: buf.toString('hex') }); } else { sendError(); } } else { sendError(); } }); }); module.exports = router; >>>>>>> REPLACE
openspending/subsidystories.eu
557f276f3886a35bf97eb44e8192dfe682243112
app/scripts/widgets/visualizations.js
javascript
mit
Remove top padding from the iframe's wrapping div and set minimum height. Also, remove the .col-xs-12 class
'use strict';

var _ = require('lodash');
var $ = require('jquery');

function render(container, options) {
  container = $(container);
  options = _.extend({
    items: []
  }, options);

  _.each(options.items, function(url) {
    var wrapper = $('<div>')
      .css({
        position: 'relative',
        paddingTop: '100%'
      })
      .append(
        $('<iframe>')
          .attr({
            width: '100%',
            height: '100%',
            border: '0',
            frameborder: '0',
            seamless: 'on',
            src: url
          })
          .css({
            border: 0,
            margin: 0,
            padding: 0,
            position: 'absolute',
            left: 0,
            top: 0,
            width: '100%',
            height: '100%'
          })
      );

    $('<div>').addClass('content-grid-item col-md-6')
      .append(wrapper).appendTo(container);
  });
}

module.exports.render = render;
'use strict';

var _ = require('lodash');
var $ = require('jquery');

function render(container, options) {
  container = $(container);
  options = _.extend({
    items: []
  }, options);

  _.each(options.items, function(url) {
    var wrapper = $('<div>')
      .css({
        position: 'relative',
        minHeight: '250vh'
      })
      .append(
        $('<iframe>')
          .attr({
            width: '100%',
            height: '100%',
            border: '0',
            frameborder: '0',
            seamless: 'on',
            src: url
          })
          .css({
            border: 0,
            margin: 0,
            padding: 0,
            position: 'absolute',
            left: 0,
            top: 0,
            width: '100%',
            height: '100%'
          })
      );

    $('<div>').addClass('content-grid-item')
      .append(wrapper).appendTo(container);
  });
}

module.exports.render = render;
2
2
2
mixed
--- a/app/scripts/widgets/visualizations.js +++ b/app/scripts/widgets/visualizations.js @@ -15,3 +15,3 @@ position: 'relative', - paddingTop: '100%' + minHeight: '250vh' }) @@ -39,3 +39,3 @@ - $('<div>').addClass('content-grid-item col-md-6') + $('<div>').addClass('content-grid-item') .append(wrapper).appendTo(container);
--- a/app/scripts/widgets/visualizations.js +++ b/app/scripts/widgets/visualizations.js @@ ... @@ position: 'relative', - paddingTop: '100%' + minHeight: '250vh' }) @@ ... @@ - $('<div>').addClass('content-grid-item col-md-6') + $('<div>').addClass('content-grid-item') .append(wrapper).appendTo(container);
--- a/app/scripts/widgets/visualizations.js +++ b/app/scripts/widgets/visualizations.js @@ -15,3 +15,3 @@ CON position: 'relative', DEL paddingTop: '100%' ADD minHeight: '250vh' CON }) @@ -39,3 +39,3 @@ CON DEL $('<div>').addClass('content-grid-item col-md-6') ADD $('<div>').addClass('content-grid-item') CON .append(wrapper).appendTo(container);
<<<<<<< SEARCH .css({ position: 'relative', paddingTop: '100%' }) .append( ======= .css({ position: 'relative', minHeight: '250vh' }) .append( >>>>>>> REPLACE <<<<<<< SEARCH ); $('<div>').addClass('content-grid-item col-md-6') .append(wrapper).appendTo(container); }); ======= ); $('<div>').addClass('content-grid-item') .append(wrapper).appendTo(container); }); >>>>>>> REPLACE
alphagov/finder-frontend
5c875d9125aa71f16e1d9cc0653baa8e2bc2596d
app/assets/javascripts/modules/track-brexit-qa-choices.js
javascript
mit
Change jquery `find` to `querySelectorAll`

Using `querySelectorAll` rather than `querySelector` as input is a list of radio buttons or checkboxes

Remove global JQuery object as we're no longer using it.
window.GOVUK = window.GOVUK || {}
window.GOVUK.Modules = window.GOVUK.Modules || {};

(function (global, GOVUK) {
  'use strict'

  var $ = global.jQuery

  GOVUK.Modules.TrackBrexitQaChoices = function () {
    this.start = function (element) {
      track(element)
    }

    function track (element) {
      element.on('submit', function (event) {
        var $checkedOption, eventLabel, options
        var $submittedForm = $(event.target)
        var $checkedOptions = $submittedForm.find('input:checked')
        var questionKey = $submittedForm.data('question-key')

        if ($checkedOptions.length) {
          $checkedOptions.each(function (index) {
            $checkedOption = $(this)
            var checkedOptionId = $checkedOption.attr('id')
            var checkedOptionLabel = $submittedForm.find('label[for="' + checkedOptionId + '"]').text().trim()
            eventLabel = checkedOptionLabel.length ? checkedOptionLabel : $checkedOption.val()

            options = {
              transport: 'beacon',
              label: eventLabel
            }

            GOVUK.SearchAnalytics.trackEvent('brexit-checker-qa', questionKey, options)
          })
        } else {
          // Skipped questions
          options = {
            transport: 'beacon',
            label: 'no choice'
          }

          GOVUK.SearchAnalytics.trackEvent('brexit-checker-qa', questionKey, options)
        }
      })
    }
  }
})(window, window.GOVUK)
window.GOVUK = window.GOVUK || {}
window.GOVUK.Modules = window.GOVUK.Modules || {};

(function (global, GOVUK) {
  'use strict'

  GOVUK.Modules.TrackBrexitQaChoices = function () {
    this.start = function (element) {
      track(element)
    }

    function track (element) {
      element.on('submit', function (event) {
        var $checkedOption, eventLabel, options
        var $submittedForm = event.target
        var $checkedOptions = $submittedForm.querySelectorAll('input:checked')
        var questionKey = $submittedForm.data('question-key')

        if ($checkedOptions.length) {
          $checkedOptions.each(function (index) {
            $checkedOption = $(this)
            var checkedOptionId = $checkedOption.attr('id')
            var checkedOptionLabel = $submittedForm.find('label[for="' + checkedOptionId + '"]').text().trim()
            eventLabel = checkedOptionLabel.length ? checkedOptionLabel : $checkedOption.val()

            options = {
              transport: 'beacon',
              label: eventLabel
            }

            GOVUK.SearchAnalytics.trackEvent('brexit-checker-qa', questionKey, options)
          })
        } else {
          // Skipped questions
          options = {
            transport: 'beacon',
            label: 'no choice'
          }

          GOVUK.SearchAnalytics.trackEvent('brexit-checker-qa', questionKey, options)
        }
      })
    }
  }
})(window, window.GOVUK)
2
4
2
mixed
--- a/app/assets/javascripts/modules/track-brexit-qa-choices.js +++ b/app/assets/javascripts/modules/track-brexit-qa-choices.js @@ -5,4 +5,2 @@ 'use strict' - - var $ = global.jQuery @@ -16,4 +14,4 @@ var $checkedOption, eventLabel, options - var $submittedForm = $(event.target) - var $checkedOptions = $submittedForm.find('input:checked') + var $submittedForm = event.target + var $checkedOptions = $submittedForm.querySelectorAll('input:checked') var questionKey = $submittedForm.data('question-key')
--- a/app/assets/javascripts/modules/track-brexit-qa-choices.js +++ b/app/assets/javascripts/modules/track-brexit-qa-choices.js @@ ... @@ 'use strict' - - var $ = global.jQuery @@ ... @@ var $checkedOption, eventLabel, options - var $submittedForm = $(event.target) - var $checkedOptions = $submittedForm.find('input:checked') + var $submittedForm = event.target + var $checkedOptions = $submittedForm.querySelectorAll('input:checked') var questionKey = $submittedForm.data('question-key')
--- a/app/assets/javascripts/modules/track-brexit-qa-choices.js +++ b/app/assets/javascripts/modules/track-brexit-qa-choices.js @@ -5,4 +5,2 @@ CON 'use strict' DEL DEL var $ = global.jQuery CON @@ -16,4 +14,4 @@ CON var $checkedOption, eventLabel, options DEL var $submittedForm = $(event.target) DEL var $checkedOptions = $submittedForm.find('input:checked') ADD var $submittedForm = event.target ADD var $checkedOptions = $submittedForm.querySelectorAll('input:checked') CON var questionKey = $submittedForm.data('question-key')
<<<<<<< SEARCH (function (global, GOVUK) { 'use strict' var $ = global.jQuery GOVUK.Modules.TrackBrexitQaChoices = function () { ======= (function (global, GOVUK) { 'use strict' GOVUK.Modules.TrackBrexitQaChoices = function () { >>>>>>> REPLACE <<<<<<< SEARCH element.on('submit', function (event) { var $checkedOption, eventLabel, options var $submittedForm = $(event.target) var $checkedOptions = $submittedForm.find('input:checked') var questionKey = $submittedForm.data('question-key') ======= element.on('submit', function (event) { var $checkedOption, eventLabel, options var $submittedForm = event.target var $checkedOptions = $submittedForm.querySelectorAll('input:checked') var questionKey = $submittedForm.data('question-key') >>>>>>> REPLACE
claudiopastorini/claudiopastorini.github.io
ac863c20ac4094168b07d6823241d55e985ba231
site.py
python
mit
Create custom test for Jinja2
import sys

from flask import Flask, render_template
from flask_flatpages import FlatPages, flatpages
from flask_frozen import Freezer

DEBUG = True
FLATPAGES_AUTO_RELOAD = DEBUG
FLATPAGES_EXTENSION = '.md'
FREEZER_DESTINATION = 'dist'

app = Flask(__name__)
app.config.from_object(__name__)
pages = FlatPages(app)
freezer = Freezer(app)


@app.route('/')
@app.route('/bio/')
def index():
    return render_template('bio.html', pages=pages)


@app.route('/portfolio/')
def portfolio():
    projects = (p for p in pages if 'date' in p.meta)
    projects = sorted(projects, reverse=True, key=lambda p: p.meta['date'])
    return render_template('portfolio.html', pages=projects)


@app.route('/portfolio/<path:path>/')
def page(path):
    page = pages.get_or_404(path)
    return render_template('project.html', page=page)


@app.route('/contatti/')
def contatti():
    page = pages.get_or_404("contatti")
    return render_template('page.html', page=page)

if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == "build":
        freezer.freeze()
    else:
        app.run(port=8080)
import sys

from flask import Flask, render_template
from flask_flatpages import FlatPages, flatpages
from flask_frozen import Freezer

DEBUG = True
FLATPAGES_AUTO_RELOAD = DEBUG
FLATPAGES_EXTENSION = '.md'
FREEZER_DESTINATION = 'dist'

app = Flask(__name__)
app.config.from_object(__name__)
pages = FlatPages(app)
freezer = Freezer(app)


@app.route('/')
@app.route('/bio/')
def index():
    return render_template('bio.html', pages=pages)


@app.route('/portfolio/')
def portfolio():
    projects = (p for p in pages if 'date' in p.meta)
    projects = sorted(projects, reverse=True, key=lambda p: p.meta['date'])
    return render_template('portfolio.html', pages=projects)


@app.route('/portfolio/<path:path>/')
def page(path):
    page = pages.get_or_404(path)
    return render_template('project.html', page=page)


@app.route('/contatti/')
def contatti():
    page = pages.get_or_404("contatti")
    return render_template('page.html', page=page)


@app.template_test("list")
def is_list(value):
    return isinstance(value, list)


if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == "build":
        freezer.freeze()
    else:
        app.run(port=8080)
6
0
1
add_only
--- a/site.py +++ b/site.py @@ -41,2 +41,8 @@ + [email protected]_test("list") +def is_list(value): + return isinstance(value, list) + + if __name__ == '__main__':
--- a/site.py +++ b/site.py @@ ... @@ + [email protected]_test("list") +def is_list(value): + return isinstance(value, list) + + if __name__ == '__main__':
--- a/site.py +++ b/site.py @@ -41,2 +41,8 @@ CON ADD ADD @app.template_test("list") ADD def is_list(value): ADD return isinstance(value, list) ADD ADD CON if __name__ == '__main__':
<<<<<<< SEARCH return render_template('page.html', page=page) if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == "build": freezer.freeze() else: app.run(port=8080) ======= return render_template('page.html', page=page) @app.template_test("list") def is_list(value): return isinstance(value, list) if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == "build": freezer.freeze() else: app.run(port=8080) >>>>>>> REPLACE
bitcoin-solutions/multibit-hd
b4574bd20bca63ff09e8d91b93338293582e21be
mbhd-core/src/main/java/org/multibit/hd/core/dto/WalletMode.java
java
mit
Add support for brand names in message keys to reduce translation burden
package org.multibit.hd.core.dto; /** * <p>Enum to provide the following to various UI models:</p> * <ul> * <li>High level wallet type selection (standard, Trezor, KeepKey etc)</li> * </ul> * * <p>This reduces code complexity in factory methods when deciding how to build supporting objects</p> * * @since 0.0.1 * */ public enum WalletMode { /** * Target a standard soft wallet (BIP 32 or BIP 44) */ STANDARD, /** * Target a Trezor wallet (BIP 44 only) */ TREZOR, /** * Target a KeepKey wallet (BIP 44 only) */ KEEP_KEY, // End of enum ; }
package org.multibit.hd.core.dto; /** * <p>Enum to provide the following to various UI models:</p> * <ul> * <li>High level wallet type selection (standard, Trezor, KeepKey etc)</li> * </ul> * * <p>This reduces code complexity in factory methods when deciding how to build supporting objects</p> * * @since 0.0.1 */ public enum WalletMode { /** * Target a standard soft wallet (BIP 32 or BIP 44) */ STANDARD("MultiBit"), /** * Target a Trezor wallet (BIP 44 only) */ TREZOR("Trezor"), /** * Target a KeepKey wallet (BIP 44 only) */ KEEP_KEY("KeepKey"), // End of enum ; private final String brand; WalletMode(String brand) { this.brand = brand; } /** * @return The brand name for use with localisation */ public String brand() { return brand; } }
15
4
5
mixed
--- a/mbhd-core/src/main/java/org/multibit/hd/core/dto/WalletMode.java +++ b/mbhd-core/src/main/java/org/multibit/hd/core/dto/WalletMode.java @@ -11,3 +11,2 @@ * @since 0.0.1 - * */ @@ -18,3 +17,3 @@ */ - STANDARD, + STANDARD("MultiBit"), @@ -23,3 +22,3 @@ */ - TREZOR, + TREZOR("Trezor"), @@ -28,3 +27,3 @@ */ - KEEP_KEY, + KEEP_KEY("KeepKey"), @@ -33,2 +32,14 @@ + private final String brand; + + WalletMode(String brand) { + this.brand = brand; + } + + /** + * @return The brand name for use with localisation + */ + public String brand() { + return brand; + } }
--- a/mbhd-core/src/main/java/org/multibit/hd/core/dto/WalletMode.java +++ b/mbhd-core/src/main/java/org/multibit/hd/core/dto/WalletMode.java @@ ... @@ * @since 0.0.1 - * */ @@ ... @@ */ - STANDARD, + STANDARD("MultiBit"), @@ ... @@ */ - TREZOR, + TREZOR("Trezor"), @@ ... @@ */ - KEEP_KEY, + KEEP_KEY("KeepKey"), @@ ... @@ + private final String brand; + + WalletMode(String brand) { + this.brand = brand; + } + + /** + * @return The brand name for use with localisation + */ + public String brand() { + return brand; + } }
--- a/mbhd-core/src/main/java/org/multibit/hd/core/dto/WalletMode.java +++ b/mbhd-core/src/main/java/org/multibit/hd/core/dto/WalletMode.java @@ -11,3 +11,2 @@ CON * @since 0.0.1 DEL * CON */ @@ -18,3 +17,3 @@ CON */ DEL STANDARD, ADD STANDARD("MultiBit"), CON @@ -23,3 +22,3 @@ CON */ DEL TREZOR, ADD TREZOR("Trezor"), CON @@ -28,3 +27,3 @@ CON */ DEL KEEP_KEY, ADD KEEP_KEY("KeepKey"), CON @@ -33,2 +32,14 @@ CON ADD private final String brand; ADD ADD WalletMode(String brand) { ADD this.brand = brand; ADD } ADD ADD /** ADD * @return The brand name for use with localisation ADD */ ADD public String brand() { ADD return brand; ADD } CON }
<<<<<<< SEARCH * * @since 0.0.1 * */ public enum WalletMode { ======= * * @since 0.0.1 */ public enum WalletMode { >>>>>>> REPLACE <<<<<<< SEARCH * Target a standard soft wallet (BIP 32 or BIP 44) */ STANDARD, /** * Target a Trezor wallet (BIP 44 only) */ TREZOR, /** * Target a KeepKey wallet (BIP 44 only) */ KEEP_KEY, // End of enum ; } ======= * Target a standard soft wallet (BIP 32 or BIP 44) */ STANDARD("MultiBit"), /** * Target a Trezor wallet (BIP 44 only) */ TREZOR("Trezor"), /** * Target a KeepKey wallet (BIP 44 only) */ KEEP_KEY("KeepKey"), // End of enum ; private final String brand; WalletMode(String brand) { this.brand = brand; } /** * @return The brand name for use with localisation */ public String brand() { return brand; } } >>>>>>> REPLACE
carols10cents/sassers
9058174b23f381c57c016bfd929cd89a2e1c92fd
src/main.rs
rust
mit
Print nicer error messages for myself
extern crate sassers; extern crate docopt; use docopt::Docopt; use std::fs::File; use std::io::Read; use std::path::Path; fn main() { const VERSION: &'static str = env!("CARGO_PKG_VERSION"); static USAGE: &'static str = " Usage: sassers [-t <style>] <inputfile> sassers [-vh] Options: -h, --help Show this message -v, --version Show the version -t <style>, --style <style> Output style [default: nested] "; let args = Docopt::new(USAGE) .and_then(|d| d.parse()) .unwrap_or_else(|e| e.exit()); if args.get_bool("-v") { println!("{}", VERSION); } else { let style = args.get_str("-t"); let inputfile = args.get_str("<inputfile>"); let mut sass = String::new(); File::open(&Path::new(&inputfile)).unwrap().read_to_string(&mut sass).unwrap(); match sassers::compile(&sass, style) { Ok(compiled) => println!("{}", compiled), Err(msg) => println!("Compilation failed: {}", msg), } } }
extern crate sassers; extern crate docopt; use docopt::Docopt; use std::fs::File; use std::io::Read; use std::path::Path; fn main() { const VERSION: &'static str = env!("CARGO_PKG_VERSION"); static USAGE: &'static str = " Usage: sassers [-t <style>] <inputfile> sassers [-vh] Options: -h, --help Show this message -v, --version Show the version -t <style>, --style <style> Output style [default: nested] "; let args = Docopt::new(USAGE) .and_then(|d| d.parse()) .unwrap_or_else(|e| e.exit()); if args.get_bool("-v") { println!("{}", VERSION); } else { let style = args.get_str("-t"); let inputfile = args.get_str("<inputfile>"); let mut sass = String::new(); let mut file = match File::open(&Path::new(&inputfile)) { Ok(f) => f, Err(msg) => panic!("File not found! {}", msg), }; match file.read_to_string(&mut sass) { Ok(_) => { match sassers::compile(&sass, style) { Ok(compiled) => println!("{}", compiled), Err(msg) => println!("Compilation failed: {}", msg), } }, Err(msg) => panic!("Could not read file! {}", msg), } } }
12
5
1
mixed
--- a/src/main.rs +++ b/src/main.rs @@ -32,7 +32,14 @@ let mut sass = String::new(); - File::open(&Path::new(&inputfile)).unwrap().read_to_string(&mut sass).unwrap(); - - match sassers::compile(&sass, style) { - Ok(compiled) => println!("{}", compiled), - Err(msg) => println!("Compilation failed: {}", msg), + let mut file = match File::open(&Path::new(&inputfile)) { + Ok(f) => f, + Err(msg) => panic!("File not found! {}", msg), + }; + match file.read_to_string(&mut sass) { + Ok(_) => { + match sassers::compile(&sass, style) { + Ok(compiled) => println!("{}", compiled), + Err(msg) => println!("Compilation failed: {}", msg), + } + }, + Err(msg) => panic!("Could not read file! {}", msg), }
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ let mut sass = String::new(); - File::open(&Path::new(&inputfile)).unwrap().read_to_string(&mut sass).unwrap(); - - match sassers::compile(&sass, style) { - Ok(compiled) => println!("{}", compiled), - Err(msg) => println!("Compilation failed: {}", msg), + let mut file = match File::open(&Path::new(&inputfile)) { + Ok(f) => f, + Err(msg) => panic!("File not found! {}", msg), + }; + match file.read_to_string(&mut sass) { + Ok(_) => { + match sassers::compile(&sass, style) { + Ok(compiled) => println!("{}", compiled), + Err(msg) => println!("Compilation failed: {}", msg), + } + }, + Err(msg) => panic!("Could not read file! {}", msg), }
--- a/src/main.rs +++ b/src/main.rs @@ -32,7 +32,14 @@ CON let mut sass = String::new(); DEL File::open(&Path::new(&inputfile)).unwrap().read_to_string(&mut sass).unwrap(); DEL DEL match sassers::compile(&sass, style) { DEL Ok(compiled) => println!("{}", compiled), DEL Err(msg) => println!("Compilation failed: {}", msg), ADD let mut file = match File::open(&Path::new(&inputfile)) { ADD Ok(f) => f, ADD Err(msg) => panic!("File not found! {}", msg), ADD }; ADD match file.read_to_string(&mut sass) { ADD Ok(_) => { ADD match sassers::compile(&sass, style) { ADD Ok(compiled) => println!("{}", compiled), ADD Err(msg) => println!("Compilation failed: {}", msg), ADD } ADD }, ADD Err(msg) => panic!("Could not read file! {}", msg), CON }
<<<<<<< SEARCH let mut sass = String::new(); File::open(&Path::new(&inputfile)).unwrap().read_to_string(&mut sass).unwrap(); match sassers::compile(&sass, style) { Ok(compiled) => println!("{}", compiled), Err(msg) => println!("Compilation failed: {}", msg), } } ======= let mut sass = String::new(); let mut file = match File::open(&Path::new(&inputfile)) { Ok(f) => f, Err(msg) => panic!("File not found! {}", msg), }; match file.read_to_string(&mut sass) { Ok(_) => { match sassers::compile(&sass, style) { Ok(compiled) => println!("{}", compiled), Err(msg) => println!("Compilation failed: {}", msg), } }, Err(msg) => panic!("Could not read file! {}", msg), } } >>>>>>> REPLACE
wahn/Rust_Examples
998b9161811cdf69a9f5aa437ae48bcc65a9489f
skat/src/main.rs
rust
mit
Select player first (before entering the loop).
extern crate rand; use rand::Rng; use std::io; enum Card { ClubsAce, ClubsTen, ClubsKing, ClubsQueen, ClubsJack, ClubsNine, ClubsEight, ClubsSeven, SpadesAce, SpadesTen, SpadesKing, SpadesQueen, SpadesJack, SpadesNine, SpadesEight, SpadesSeven, HeartsAce, HeartsTen, HeartsKing, HeartsQueen, HeartsJack, HeartsNine, HeartsEight, HeartsSeven, DiamondsAce, DiamondsTen, DiamondsKing, DiamondsQueen, DiamondsJack, DiamondsNine, DiamondsEight, DiamondsSeven, } enum Player { A, B, C, } fn main() { loop { // randomly select player let player_number = rand::thread_rng().gen_range(0, 3); match player_number { 0 => println!("player A:"), 1 => println!("player B:"), 2 => println!("player C:"), _ => break, } let mut input = String::new(); io::stdin().read_line(&mut input) .ok() .expect("failed to read line"); let input: u8 = match input.trim().parse() { Ok(num) => num, Err(_) => break, }; println!("your input: {}", input); } }
extern crate rand; use rand::Rng; use std::io; enum Card { ClubsAce, ClubsTen, ClubsKing, ClubsQueen, ClubsJack, ClubsNine, ClubsEight, ClubsSeven, SpadesAce, SpadesTen, SpadesKing, SpadesQueen, SpadesJack, SpadesNine, SpadesEight, SpadesSeven, HeartsAce, HeartsTen, HeartsKing, HeartsQueen, HeartsJack, HeartsNine, HeartsEight, HeartsSeven, DiamondsAce, DiamondsTen, DiamondsKing, DiamondsQueen, DiamondsJack, DiamondsNine, DiamondsEight, DiamondsSeven, } enum Player { A, B, C, } fn main() { // randomly select player let player_number = rand::thread_rng().gen_range(0, 3); match player_number { 0 => println!("player A:"), 1 => println!("player B:"), 2 => println!("player C:"), _ => panic!("Uknown player {}", player_number), } loop { let mut input = String::new(); io::stdin().read_line(&mut input) .ok() .expect("failed to read line"); let input: u8 = match input.trim().parse() { Ok(num) => num, Err(_) => break, }; println!("your input: {}", input); } }
8
8
1
mixed
--- a/skat/src/main.rs +++ b/skat/src/main.rs @@ -47,11 +47,11 @@ fn main() { + // randomly select player + let player_number = rand::thread_rng().gen_range(0, 3); + match player_number { + 0 => println!("player A:"), + 1 => println!("player B:"), + 2 => println!("player C:"), + _ => panic!("Uknown player {}", player_number), + } loop { - // randomly select player - let player_number = rand::thread_rng().gen_range(0, 3); - match player_number { - 0 => println!("player A:"), - 1 => println!("player B:"), - 2 => println!("player C:"), - _ => break, - } let mut input = String::new();
--- a/skat/src/main.rs +++ b/skat/src/main.rs @@ ... @@ fn main() { + // randomly select player + let player_number = rand::thread_rng().gen_range(0, 3); + match player_number { + 0 => println!("player A:"), + 1 => println!("player B:"), + 2 => println!("player C:"), + _ => panic!("Uknown player {}", player_number), + } loop { - // randomly select player - let player_number = rand::thread_rng().gen_range(0, 3); - match player_number { - 0 => println!("player A:"), - 1 => println!("player B:"), - 2 => println!("player C:"), - _ => break, - } let mut input = String::new();
--- a/skat/src/main.rs +++ b/skat/src/main.rs @@ -47,11 +47,11 @@ CON fn main() { ADD // randomly select player ADD let player_number = rand::thread_rng().gen_range(0, 3); ADD match player_number { ADD 0 => println!("player A:"), ADD 1 => println!("player B:"), ADD 2 => println!("player C:"), ADD _ => panic!("Uknown player {}", player_number), ADD } CON loop { DEL // randomly select player DEL let player_number = rand::thread_rng().gen_range(0, 3); DEL match player_number { DEL 0 => println!("player A:"), DEL 1 => println!("player B:"), DEL 2 => println!("player C:"), DEL _ => break, DEL } CON let mut input = String::new();
<<<<<<< SEARCH fn main() { loop { // randomly select player let player_number = rand::thread_rng().gen_range(0, 3); match player_number { 0 => println!("player A:"), 1 => println!("player B:"), 2 => println!("player C:"), _ => break, } let mut input = String::new(); io::stdin().read_line(&mut input) ======= fn main() { // randomly select player let player_number = rand::thread_rng().gen_range(0, 3); match player_number { 0 => println!("player A:"), 1 => println!("player B:"), 2 => println!("player C:"), _ => panic!("Uknown player {}", player_number), } loop { let mut input = String::new(); io::stdin().read_line(&mut input) >>>>>>> REPLACE
allen-garvey/photog-spark
1c14c4ac824f2242cc8beeb14f836ee4a9e030cc
src/main/kotlin/main/main.kt
kotlin
mit
Edit routes to allow matches with trailing slash
package main /** * Created by allen on 7/28/16. */ import spark.Spark.* import com.google.gson.Gson import controllers.SqliteController import models.User import spark.ModelAndView import spark.template.handlebars.HandlebarsTemplateEngine import java.util.* fun main(args : Array<String>) { port(3000) staticFiles.location("/public"); //gzip everything after({req, res -> res.header("Content-Encoding", "gzip") }) //used to parse and convert JSON val gson = Gson() val templateEngine = HandlebarsTemplateEngine() // SqliteController.selectAllAlbums() get("/", { req, res -> ModelAndView(hashMapOf(Pair("name", "Test")), "index.hbs") }, templateEngine) get("/hello/:name", { req, res -> ModelAndView(hashMapOf(Pair("name", req.params(":name"))), "index.hbs") }, templateEngine) get("/user/:first/:last/json", { req, res -> User(req.params(":first"), req.params(":last")) }, { gson.toJson(it) }) get("/api/albums", { req, res -> SqliteController.selectAllAlbums() }, { gson.toJson(it) }) get("/api/albums/:id/images", { req, res -> SqliteController.imagesForAlbum(req.params(":id")) }, { gson.toJson(it) }) }
package main /** * Created by allen on 7/28/16. */ import spark.Spark.* import com.google.gson.Gson import controllers.SqliteController import models.User import spark.ModelAndView import spark.template.handlebars.HandlebarsTemplateEngine import java.util.* fun main(args : Array<String>) { port(3000) staticFiles.location("/public") //allow routes to match with trailing slash before({ req, res -> val path = req.pathInfo() if (path.endsWith("/")){ res.redirect(path.substring(0, path.length - 1)) } }) //gzip everything after({req, res -> res.header("Content-Encoding", "gzip") }) //used to parse and convert JSON val gson = Gson() val templateEngine = HandlebarsTemplateEngine() // SqliteController.selectAllAlbums() get("/", { req, res -> ModelAndView(hashMapOf(Pair("name", "Test")), "index.hbs") }, templateEngine) get("/hello/:name", { req, res -> ModelAndView(hashMapOf(Pair("name", req.params(":name"))), "index.hbs") }, templateEngine) get("/user/:first/:last/json", { req, res -> User(req.params(":first"), req.params(":last")) }, { gson.toJson(it) }) get("/api/albums", { req, res -> SqliteController.selectAllAlbums() }, { gson.toJson(it) }) get("/api/albums/:id/images", { req, res -> SqliteController.imagesForAlbum(req.params(":id")) }, { gson.toJson(it) }) }
9
1
1
mixed
--- a/src/main/kotlin/main/main.kt +++ b/src/main/kotlin/main/main.kt @@ -16,3 +16,11 @@ - staticFiles.location("/public"); + staticFiles.location("/public") + + //allow routes to match with trailing slash + before({ req, res -> + val path = req.pathInfo() + if (path.endsWith("/")){ + res.redirect(path.substring(0, path.length - 1)) + } + })
--- a/src/main/kotlin/main/main.kt +++ b/src/main/kotlin/main/main.kt @@ ... @@ - staticFiles.location("/public"); + staticFiles.location("/public") + + //allow routes to match with trailing slash + before({ req, res -> + val path = req.pathInfo() + if (path.endsWith("/")){ + res.redirect(path.substring(0, path.length - 1)) + } + })
--- a/src/main/kotlin/main/main.kt +++ b/src/main/kotlin/main/main.kt @@ -16,3 +16,11 @@ CON DEL staticFiles.location("/public"); ADD staticFiles.location("/public") ADD ADD //allow routes to match with trailing slash ADD before({ req, res -> ADD val path = req.pathInfo() ADD if (path.endsWith("/")){ ADD res.redirect(path.substring(0, path.length - 1)) ADD } ADD }) CON
<<<<<<< SEARCH port(3000) staticFiles.location("/public"); //gzip everything ======= port(3000) staticFiles.location("/public") //allow routes to match with trailing slash before({ req, res -> val path = req.pathInfo() if (path.endsWith("/")){ res.redirect(path.substring(0, path.length - 1)) } }) //gzip everything >>>>>>> REPLACE
StewEsho/Wa-Tor
e9251220406d1e31abbc5e9ba82e50ba94709996
core/src/com/stewesho/wator/Main.java
java
mit
Revert "added entity lists(libgdx arrays) for fishes and sharks" This reverts commit 80c76c829d63f67faa83ef7c2aaa8a601ce461ef.
package com.stewesho.wator; import com.badlogic.gdx.ApplicationAdapter; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.OrthographicCamera; public class Main extends ApplicationAdapter { SpriteBatch batch; OrthographicCamera cam; WorldManager world; @Override public void create () { batch = new SpriteBatch(); cam = new OrthographicCamera(50, 50); world = new WorldManager(25, 25); } @Override public void render () { Gdx.gl.glClearColor(0.250f, 0.250f, 0.300f, 1.000f); Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT); batch.setProjectionMatrix(cam.combined); batch.begin(); batch.draw(world.run(), -world.getMap.getWidth()/2, -world.getMap.getHeight()/2); batch.end(); } @Override public void dispose () { batch.dispose(); // map.disposeResources(); } }
package com.stewesho.wator; import com.badlogic.gdx.ApplicationAdapter; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.OrthographicCamera; public class Main extends ApplicationAdapter { SpriteBatch batch; OrthographicCamera cam; @Override public void create () { batch = new SpriteBatch(); cam = new OrthographicCamera(50, 50); } @Override public void render () { Gdx.gl.glClearColor(0.250f, 0.250f, 0.300f, 1.000f); Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT); batch.setProjectionMatrix(cam.combined); batch.begin(); // batch.draw(map.render(), -map.getWidth()/2, -map.getHeight()/2); batch.end(); } @Override public void dispose () { batch.dispose(); // map.disposeResources(); } }
1
3
3
mixed
--- a/core/src/com/stewesho/wator/Main.java +++ b/core/src/com/stewesho/wator/Main.java @@ -12,3 +12,2 @@ OrthographicCamera cam; - WorldManager world; @@ -18,3 +17,2 @@ cam = new OrthographicCamera(50, 50); - world = new WorldManager(25, 25); } @@ -29,3 +27,3 @@ - batch.draw(world.run(), -world.getMap.getWidth()/2, -world.getMap.getHeight()/2); + // batch.draw(map.render(), -map.getWidth()/2, -map.getHeight()/2);
--- a/core/src/com/stewesho/wator/Main.java +++ b/core/src/com/stewesho/wator/Main.java @@ ... @@ OrthographicCamera cam; - WorldManager world; @@ ... @@ cam = new OrthographicCamera(50, 50); - world = new WorldManager(25, 25); } @@ ... @@ - batch.draw(world.run(), -world.getMap.getWidth()/2, -world.getMap.getHeight()/2); + // batch.draw(map.render(), -map.getWidth()/2, -map.getHeight()/2);
--- a/core/src/com/stewesho/wator/Main.java +++ b/core/src/com/stewesho/wator/Main.java @@ -12,3 +12,2 @@ CON OrthographicCamera cam; DEL WorldManager world; CON @@ -18,3 +17,2 @@ CON cam = new OrthographicCamera(50, 50); DEL world = new WorldManager(25, 25); CON } @@ -29,3 +27,3 @@ CON DEL batch.draw(world.run(), -world.getMap.getWidth()/2, -world.getMap.getHeight()/2); ADD // batch.draw(map.render(), -map.getWidth()/2, -map.getHeight()/2); CON
<<<<<<< SEARCH SpriteBatch batch; OrthographicCamera cam; WorldManager world; @Override public void create () { batch = new SpriteBatch(); cam = new OrthographicCamera(50, 50); world = new WorldManager(25, 25); } ======= SpriteBatch batch; OrthographicCamera cam; @Override public void create () { batch = new SpriteBatch(); cam = new OrthographicCamera(50, 50); } >>>>>>> REPLACE <<<<<<< SEARCH batch.begin(); batch.draw(world.run(), -world.getMap.getWidth()/2, -world.getMap.getHeight()/2); batch.end(); ======= batch.begin(); // batch.draw(map.render(), -map.getWidth()/2, -map.getHeight()/2); batch.end(); >>>>>>> REPLACE
virtool/virtool
ad73789f74106a2d6014a2f737578494d2d21fbf
virtool/api/processes.py
python
mit
Remove specific process API GET endpoints
import virtool.http.routes import virtool.utils from virtool.api.utils import json_response routes = virtool.http.routes.Routes() @routes.get("/api/processes") async def find(req): db = req.app["db"] documents = [virtool.utils.base_processor(d) async for d in db.processes.find()] return json_response(documents) @routes.get("/api/processes/{process_id}") async def get(req): db = req.app["db"] process_id = req.match_info["process_id"] document = await db.processes.find_one(process_id) return json_response(virtool.utils.base_processor(document)) @routes.get("/api/processes/software_update") async def get_software_update(req): db = req.app["db"] document = await db.processes.find_one({"type": "software_update"}) return json_response(virtool.utils.base_processor(document)) @routes.get("/api/processes/hmm_install") async def get_hmm_install(req): db = req.app["db"] document = await db.processes.find_one({"type": "hmm_install"}) return json_response(virtool.utils.base_processor(document))
import virtool.http.routes import virtool.utils from virtool.api.utils import json_response routes = virtool.http.routes.Routes() @routes.get("/api/processes") async def find(req): db = req.app["db"] documents = [virtool.utils.base_processor(d) async for d in db.processes.find()] return json_response(documents) @routes.get("/api/processes/{process_id}") async def get(req): db = req.app["db"] process_id = req.match_info["process_id"] document = await db.processes.find_one(process_id) return json_response(virtool.utils.base_processor(document))
0
18
1
del_only
--- a/virtool/api/processes.py +++ b/virtool/api/processes.py @@ -25,19 +25 @@ return json_response(virtool.utils.base_processor(document)) - - [email protected]("/api/processes/software_update") -async def get_software_update(req): - db = req.app["db"] - - document = await db.processes.find_one({"type": "software_update"}) - - return json_response(virtool.utils.base_processor(document)) - - [email protected]("/api/processes/hmm_install") -async def get_hmm_install(req): - db = req.app["db"] - - document = await db.processes.find_one({"type": "hmm_install"}) - - return json_response(virtool.utils.base_processor(document))
--- a/virtool/api/processes.py +++ b/virtool/api/processes.py @@ ... @@ return json_response(virtool.utils.base_processor(document)) - - [email protected]("/api/processes/software_update") -async def get_software_update(req): - db = req.app["db"] - - document = await db.processes.find_one({"type": "software_update"}) - - return json_response(virtool.utils.base_processor(document)) - - [email protected]("/api/processes/hmm_install") -async def get_hmm_install(req): - db = req.app["db"] - - document = await db.processes.find_one({"type": "hmm_install"}) - - return json_response(virtool.utils.base_processor(document))
--- a/virtool/api/processes.py +++ b/virtool/api/processes.py @@ -25,19 +25 @@ CON return json_response(virtool.utils.base_processor(document)) DEL DEL DEL @routes.get("/api/processes/software_update") DEL async def get_software_update(req): DEL db = req.app["db"] DEL DEL document = await db.processes.find_one({"type": "software_update"}) DEL DEL return json_response(virtool.utils.base_processor(document)) DEL DEL DEL @routes.get("/api/processes/hmm_install") DEL async def get_hmm_install(req): DEL db = req.app["db"] DEL DEL document = await db.processes.find_one({"type": "hmm_install"}) DEL DEL return json_response(virtool.utils.base_processor(document))
<<<<<<< SEARCH return json_response(virtool.utils.base_processor(document)) @routes.get("/api/processes/software_update") async def get_software_update(req): db = req.app["db"] document = await db.processes.find_one({"type": "software_update"}) return json_response(virtool.utils.base_processor(document)) @routes.get("/api/processes/hmm_install") async def get_hmm_install(req): db = req.app["db"] document = await db.processes.find_one({"type": "hmm_install"}) return json_response(virtool.utils.base_processor(document)) ======= return json_response(virtool.utils.base_processor(document)) >>>>>>> REPLACE
tdaede/mpv-rice
a84842ddc7c7149b085ba7791b3c46fec3d6ac7f
src/main.rs
rust
lgpl-2.1
Remove deprecated mpv option usage.
extern crate gtk; use gtk::prelude::*; use std::error::Error; use std::io::prelude::*; use std::fs::File; use std::path::Path; use std::env; fn main() { gtk::init().expect("Failed to initialize GTK."); let window = gtk::Window::new(gtk::WindowType::Toplevel); window.set_title("mpv-rice"); window.set_border_width(10); // window.set_window_position(gtk::WindowPosition::Center); window.set_default_size(350, 70); window.connect_delete_event(|_, _| { gtk::main_quit(); Inhibit(true) }); let button = gtk::Button::new_with_label("Write config"); button.connect_clicked(move |_| { let args: Vec<String> = env::args().collect(); let path = Path::new(&args[1]); let display = path.display(); let mut file = match File::create(&path) { Err(why) => panic!("couldn't create {}: {}", display, Error::description(&why)), Ok(file) => file, }; let _ = file.write_all(b"# generated by mpv-rice\n"); let _ = file.write_all(b"vo=opengl-hq\n"); }); window.add(&button); window.show_all(); gtk::main(); }
extern crate gtk; use gtk::prelude::*; use std::error::Error; use std::io::prelude::*; use std::fs::File; use std::path::Path; use std::env; fn main() { gtk::init().expect("Failed to initialize GTK."); let window = gtk::Window::new(gtk::WindowType::Toplevel); window.set_title("mpv-rice"); window.set_border_width(10); // window.set_window_position(gtk::WindowPosition::Center); window.set_default_size(350, 70); window.connect_delete_event(|_, _| { gtk::main_quit(); Inhibit(true) }); let button = gtk::Button::new_with_label("Write config"); button.connect_clicked(move |_| { let args: Vec<String> = env::args().collect(); let path = Path::new(&args[1]); let display = path.display(); let mut file = match File::create(&path) { Err(why) => panic!("couldn't create {}: {}", display, Error::description(&why)), Ok(file) => file, }; let _ = file.write_all(b"# generated by mpv-rice\n"); let _ = file.write_all(b"profile=opengl-hq\n"); }); window.add(&button); window.show_all(); gtk::main(); }
1
1
1
mixed
--- a/src/main.rs +++ b/src/main.rs @@ -38,3 +38,3 @@ let _ = file.write_all(b"# generated by mpv-rice\n"); - let _ = file.write_all(b"vo=opengl-hq\n"); + let _ = file.write_all(b"profile=opengl-hq\n"); });
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ let _ = file.write_all(b"# generated by mpv-rice\n"); - let _ = file.write_all(b"vo=opengl-hq\n"); + let _ = file.write_all(b"profile=opengl-hq\n"); });
--- a/src/main.rs +++ b/src/main.rs @@ -38,3 +38,3 @@ CON let _ = file.write_all(b"# generated by mpv-rice\n"); DEL let _ = file.write_all(b"vo=opengl-hq\n"); ADD let _ = file.write_all(b"profile=opengl-hq\n"); CON });
<<<<<<< SEARCH }; let _ = file.write_all(b"# generated by mpv-rice\n"); let _ = file.write_all(b"vo=opengl-hq\n"); }); window.add(&button); ======= }; let _ = file.write_all(b"# generated by mpv-rice\n"); let _ = file.write_all(b"profile=opengl-hq\n"); }); window.add(&button); >>>>>>> REPLACE
hgschmie/presto
ca1d7307edd44e7f5b24d4fae6b8f8f6c1f8832e
presto-orc/src/main/java/io/prestosql/orc/OrcPredicate.java
java
apache-2.0
Convert anonymous class to lambda
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.orc; import io.prestosql.orc.metadata.statistics.ColumnStatistics; import java.util.Map; public interface OrcPredicate { OrcPredicate TRUE = new OrcPredicate() { @Override public boolean matches(long numberOfRows, Map<Integer, ColumnStatistics> statisticsByColumnIndex) { return true; } }; /** * Should the ORC reader process a file section with the specified statistics. * * @param numberOfRows the number of rows in the segment; this can be used with * {@code ColumnStatistics} to determine if a column is only null * @param statisticsByColumnIndex statistics for column by ordinal position * in the file; this will match the field order from the hive metastore */ boolean matches(long numberOfRows, Map<Integer, ColumnStatistics> statisticsByColumnIndex); }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.orc; import io.prestosql.orc.metadata.statistics.ColumnStatistics; import java.util.Map; public interface OrcPredicate { OrcPredicate TRUE = (numberOfRows, statisticsByColumnIndex) -> true; /** * Should the ORC reader process a file section with the specified statistics. * * @param numberOfRows the number of rows in the segment; this can be used with * {@code ColumnStatistics} to determine if a column is only null * @param statisticsByColumnIndex statistics for column by ordinal position * in the file; this will match the field order from the hive metastore */ boolean matches(long numberOfRows, Map<Integer, ColumnStatistics> statisticsByColumnIndex); }
1
8
1
mixed
--- a/presto-orc/src/main/java/io/prestosql/orc/OrcPredicate.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcPredicate.java @@ -21,10 +21,3 @@ { - OrcPredicate TRUE = new OrcPredicate() - { - @Override - public boolean matches(long numberOfRows, Map<Integer, ColumnStatistics> statisticsByColumnIndex) - { - return true; - } - }; + OrcPredicate TRUE = (numberOfRows, statisticsByColumnIndex) -> true;
--- a/presto-orc/src/main/java/io/prestosql/orc/OrcPredicate.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcPredicate.java @@ ... @@ { - OrcPredicate TRUE = new OrcPredicate() - { - @Override - public boolean matches(long numberOfRows, Map<Integer, ColumnStatistics> statisticsByColumnIndex) - { - return true; - } - }; + OrcPredicate TRUE = (numberOfRows, statisticsByColumnIndex) -> true;
--- a/presto-orc/src/main/java/io/prestosql/orc/OrcPredicate.java +++ b/presto-orc/src/main/java/io/prestosql/orc/OrcPredicate.java @@ -21,10 +21,3 @@ CON { DEL OrcPredicate TRUE = new OrcPredicate() DEL { DEL @Override DEL public boolean matches(long numberOfRows, Map<Integer, ColumnStatistics> statisticsByColumnIndex) DEL { DEL return true; DEL } DEL }; ADD OrcPredicate TRUE = (numberOfRows, statisticsByColumnIndex) -> true; CON
<<<<<<< SEARCH public interface OrcPredicate { OrcPredicate TRUE = new OrcPredicate() { @Override public boolean matches(long numberOfRows, Map<Integer, ColumnStatistics> statisticsByColumnIndex) { return true; } }; /** ======= public interface OrcPredicate { OrcPredicate TRUE = (numberOfRows, statisticsByColumnIndex) -> true; /** >>>>>>> REPLACE
tracek/gee_asset_manager
8eca7b30865e4d02fd440f55ad3215dee6fab8a1
gee_asset_manager/batch_remover.py
python
apache-2.0
Add warning when removing an asset without full path
import fnmatch import logging import sys import ee def delete(asset_path): root = asset_path[:asset_path.rfind('/')] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})] filtered_names = fnmatch.filter(all_assets_names, asset_path) if not filtered_names: logging.warning('Nothing to remove. Exiting.') sys.exit(1) else: for path in filtered_names: __delete_recursive(path) logging.info('Collection %s removed', path) def __delete_recursive(asset_path): info = ee.data.getInfo(asset_path) if not info: logging.warning('Nothing to delete.') sys.exit(1) elif info['type'] == 'Image': pass elif info['type'] == 'Folder': items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: logging.info('Removing items in %s folder', item['id']) delete(item['id']) else: items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: ee.data.deleteAsset(item['id']) ee.data.deleteAsset(asset_path)
import fnmatch import logging import sys import ee def delete(asset_path): root_idx = asset_path.rfind('/') if root_idx == -1: logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow') sys.exit(1) root = asset_path[:root_idx] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})] filtered_names = fnmatch.filter(all_assets_names, asset_path) if not filtered_names: logging.warning('Nothing to remove. Exiting.') sys.exit(1) else: for path in filtered_names: __delete_recursive(path) logging.info('Collection %s removed', path) def __delete_recursive(asset_path): info = ee.data.getInfo(asset_path) if not info: logging.warning('Nothing to delete.') sys.exit(1) elif info['type'] == 'Image': pass elif info['type'] == 'Folder': items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: logging.info('Removing items in %s folder', item['id']) delete(item['id']) else: items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: ee.data.deleteAsset(item['id']) ee.data.deleteAsset(asset_path)
5
1
1
mixed
--- a/gee_asset_manager/batch_remover.py +++ b/gee_asset_manager/batch_remover.py @@ -8,3 +8,7 @@ def delete(asset_path): - root = asset_path[:asset_path.rfind('/')] + root_idx = asset_path.rfind('/') + if root_idx == -1: + logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow') + sys.exit(1) + root = asset_path[:root_idx] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
--- a/gee_asset_manager/batch_remover.py +++ b/gee_asset_manager/batch_remover.py @@ ... @@ def delete(asset_path): - root = asset_path[:asset_path.rfind('/')] + root_idx = asset_path.rfind('/') + if root_idx == -1: + logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow') + sys.exit(1) + root = asset_path[:root_idx] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
--- a/gee_asset_manager/batch_remover.py +++ b/gee_asset_manager/batch_remover.py @@ -8,3 +8,7 @@ CON def delete(asset_path): DEL root = asset_path[:asset_path.rfind('/')] ADD root_idx = asset_path.rfind('/') ADD if root_idx == -1: ADD logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow') ADD sys.exit(1) ADD root = asset_path[:root_idx] CON all_assets_names = [e['id'] for e in ee.data.getList({'id': root})]
<<<<<<< SEARCH def delete(asset_path): root = asset_path[:asset_path.rfind('/')] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})] filtered_names = fnmatch.filter(all_assets_names, asset_path) ======= def delete(asset_path): root_idx = asset_path.rfind('/') if root_idx == -1: logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow') sys.exit(1) root = asset_path[:root_idx] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})] filtered_names = fnmatch.filter(all_assets_names, asset_path) >>>>>>> REPLACE
mikegehard/user-management-evolution-kotlin
b57aa414e1584911963940112eb0e502ec8b2b08
applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt
kotlin
mit
Use lateinit for autowired variables According to the Kotlin docs, this seems to be the preferred way to handle dependency injected instance variables. See: https://kotlinlang.org/docs/reference/properties.html#late-initialized-properties
package com.example.billing.reocurringPayments import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController @RestController class Controller { @Autowired private var paymentGateway: com.example.payments.Gateway? = null @Autowired private var counter: CounterService? = null @Autowired private var service: Service? = null @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders() responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service!!.thisMayFail() val response: ResponseEntity<String> if (paymentGateway!!.createReocurringPayment(data["amount"] as Int)) { counter!!.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { response = ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } }
package com.example.billing.reocurringPayments import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController @RestController class Controller { @Autowired private lateinit var paymentGateway: com.example.payments.Gateway @Autowired private lateinit var counter: CounterService @Autowired private lateinit var service: Service @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders() responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service.thisMayFail() val response: ResponseEntity<String> if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { response = ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } }
6
6
2
mixed
--- a/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt +++ b/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt @@ -16,9 +16,9 @@ @Autowired - private var paymentGateway: com.example.payments.Gateway? = null + private lateinit var paymentGateway: com.example.payments.Gateway @Autowired - private var counter: CounterService? = null + private lateinit var counter: CounterService @Autowired - private var service: Service? = null + private lateinit var service: Service @@ -29,7 +29,7 @@ - service!!.thisMayFail() + service.thisMayFail() val response: ResponseEntity<String> - if (paymentGateway!!.createReocurringPayment(data["amount"] as Int)) { - counter!!.increment("billing.reocurringPayment.created") + if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { + counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED)
--- a/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt +++ b/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt @@ ... @@ @Autowired - private var paymentGateway: com.example.payments.Gateway? = null + private lateinit var paymentGateway: com.example.payments.Gateway @Autowired - private var counter: CounterService? = null + private lateinit var counter: CounterService @Autowired - private var service: Service? = null + private lateinit var service: Service @@ ... @@ - service!!.thisMayFail() + service.thisMayFail() val response: ResponseEntity<String> - if (paymentGateway!!.createReocurringPayment(data["amount"] as Int)) { - counter!!.increment("billing.reocurringPayment.created") + if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { + counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED)
--- a/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt +++ b/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt @@ -16,9 +16,9 @@ CON @Autowired DEL private var paymentGateway: com.example.payments.Gateway? = null ADD private lateinit var paymentGateway: com.example.payments.Gateway CON CON @Autowired DEL private var counter: CounterService? = null ADD private lateinit var counter: CounterService CON CON @Autowired DEL private var service: Service? = null ADD private lateinit var service: Service CON @@ -29,7 +29,7 @@ CON DEL service!!.thisMayFail() ADD service.thisMayFail() CON CON val response: ResponseEntity<String> DEL if (paymentGateway!!.createReocurringPayment(data["amount"] as Int)) { DEL counter!!.increment("billing.reocurringPayment.created") ADD if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { ADD counter.increment("billing.reocurringPayment.created") CON response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED)
<<<<<<< SEARCH class Controller { @Autowired private var paymentGateway: com.example.payments.Gateway? = null @Autowired private var counter: CounterService? = null @Autowired private var service: Service? = null @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) ======= class Controller { @Autowired private lateinit var paymentGateway: com.example.payments.Gateway @Autowired private lateinit var counter: CounterService @Autowired private lateinit var service: Service @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) >>>>>>> REPLACE <<<<<<< SEARCH responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service!!.thisMayFail() val response: ResponseEntity<String> if (paymentGateway!!.createReocurringPayment(data["amount"] as Int)) { counter!!.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { ======= responseHeaders.add("content-type", MediaType.APPLICATION_JSON.toString()) service.thisMayFail() val response: ResponseEntity<String> if (paymentGateway.createReocurringPayment(data["amount"] as Int)) { counter.increment("billing.reocurringPayment.created") response = ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { >>>>>>> REPLACE
JakeWharton/dex-method-list
3816b5f3848ff9d53f80f1b64c7fb65b4bf2a98d
diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/SignaturesDiff.kt
kotlin
apache-2.0
Add label to signature table
package com.jakewharton.diffuse.diff import com.jakewharton.diffuse.Signatures import com.jakewharton.diffuse.diffuseTable import okio.ByteString internal class SignaturesDiff( val oldSignatures: Signatures, val newSignatures: Signatures ) { val changed = oldSignatures != newSignatures } internal fun SignaturesDiff.toDetailReport() = buildString { appendln() appendln(diffuseTable { header { row("", "old", "new") } if (oldSignatures.v1.isNotEmpty() || newSignatures.v1.isNotEmpty()) { row("V1", oldSignatures.v1.joinToString("\n", transform = ByteString::hex), newSignatures.v1.joinToString("\n", transform = ByteString::hex)) } if (oldSignatures.v2.isNotEmpty() || newSignatures.v2.isNotEmpty()) { row("V2", oldSignatures.v2.joinToString("\n", transform = ByteString::hex), newSignatures.v2.joinToString("\n", transform = ByteString::hex)) } if (oldSignatures.v3.isNotEmpty() || newSignatures.v3.isNotEmpty()) { row("V3", oldSignatures.v3.joinToString("\n", transform = ByteString::hex), newSignatures.v3.joinToString("\n", transform = ByteString::hex)) } }) }
package com.jakewharton.diffuse.diff import com.jakewharton.diffuse.Signatures import com.jakewharton.diffuse.diffuseTable import com.jakewharton.picnic.TextAlignment.TopRight import okio.ByteString internal class SignaturesDiff( val oldSignatures: Signatures, val newSignatures: Signatures ) { val changed = oldSignatures != newSignatures } internal fun SignaturesDiff.toDetailReport() = buildString { appendln() appendln(diffuseTable { header { row("SIGNATURES", "old", "new") } if (oldSignatures.v1.isNotEmpty() || newSignatures.v1.isNotEmpty()) { row { cell("V1") { alignment = TopRight } cell(oldSignatures.v1.joinToString("\n", transform = ByteString::hex)) cell(newSignatures.v1.joinToString("\n", transform = ByteString::hex)) } } if (oldSignatures.v2.isNotEmpty() || newSignatures.v2.isNotEmpty()) { row { cell("V2") { alignment = TopRight } cell(oldSignatures.v2.joinToString("\n", transform = ByteString::hex)) cell(newSignatures.v2.joinToString("\n", transform = ByteString::hex)) } } if (oldSignatures.v3.isNotEmpty() || newSignatures.v3.isNotEmpty()) { row { cell("V3") { alignment = TopRight } cell(oldSignatures.v3.joinToString("\n", transform = ByteString::hex)) cell(newSignatures.v3.joinToString("\n", transform = ByteString::hex)) } } }) }
23
10
2
mixed
--- a/diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/SignaturesDiff.kt +++ b/diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/SignaturesDiff.kt @@ -4,2 +4,3 @@ import com.jakewharton.diffuse.diffuseTable +import com.jakewharton.picnic.TextAlignment.TopRight import okio.ByteString @@ -17,18 +18,30 @@ header { - row("", "old", "new") + row("SIGNATURES", "old", "new") } if (oldSignatures.v1.isNotEmpty() || newSignatures.v1.isNotEmpty()) { - row("V1", - oldSignatures.v1.joinToString("\n", transform = ByteString::hex), - newSignatures.v1.joinToString("\n", transform = ByteString::hex)) + row { + cell("V1") { + alignment = TopRight + } + cell(oldSignatures.v1.joinToString("\n", transform = ByteString::hex)) + cell(newSignatures.v1.joinToString("\n", transform = ByteString::hex)) + } } if (oldSignatures.v2.isNotEmpty() || newSignatures.v2.isNotEmpty()) { - row("V2", - oldSignatures.v2.joinToString("\n", transform = ByteString::hex), - newSignatures.v2.joinToString("\n", transform = ByteString::hex)) + row { + cell("V2") { + alignment = TopRight + } + cell(oldSignatures.v2.joinToString("\n", transform = ByteString::hex)) + cell(newSignatures.v2.joinToString("\n", transform = ByteString::hex)) + } } if (oldSignatures.v3.isNotEmpty() || newSignatures.v3.isNotEmpty()) { - row("V3", - oldSignatures.v3.joinToString("\n", transform = ByteString::hex), - newSignatures.v3.joinToString("\n", transform = ByteString::hex)) + row { + cell("V3") { + alignment = TopRight + } + cell(oldSignatures.v3.joinToString("\n", transform = ByteString::hex)) + cell(newSignatures.v3.joinToString("\n", transform = ByteString::hex)) + } }
--- a/diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/SignaturesDiff.kt +++ b/diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/SignaturesDiff.kt @@ ... @@ import com.jakewharton.diffuse.diffuseTable +import com.jakewharton.picnic.TextAlignment.TopRight import okio.ByteString @@ ... @@ header { - row("", "old", "new") + row("SIGNATURES", "old", "new") } if (oldSignatures.v1.isNotEmpty() || newSignatures.v1.isNotEmpty()) { - row("V1", - oldSignatures.v1.joinToString("\n", transform = ByteString::hex), - newSignatures.v1.joinToString("\n", transform = ByteString::hex)) + row { + cell("V1") { + alignment = TopRight + } + cell(oldSignatures.v1.joinToString("\n", transform = ByteString::hex)) + cell(newSignatures.v1.joinToString("\n", transform = ByteString::hex)) + } } if (oldSignatures.v2.isNotEmpty() || newSignatures.v2.isNotEmpty()) { - row("V2", - oldSignatures.v2.joinToString("\n", transform = ByteString::hex), - newSignatures.v2.joinToString("\n", transform = ByteString::hex)) + row { + cell("V2") { + alignment = TopRight + } + cell(oldSignatures.v2.joinToString("\n", transform = ByteString::hex)) + cell(newSignatures.v2.joinToString("\n", transform = ByteString::hex)) + } } if (oldSignatures.v3.isNotEmpty() || newSignatures.v3.isNotEmpty()) { - row("V3", - oldSignatures.v3.joinToString("\n", transform = ByteString::hex), - newSignatures.v3.joinToString("\n", transform = ByteString::hex)) + row { + cell("V3") { + alignment = TopRight + } + cell(oldSignatures.v3.joinToString("\n", transform = ByteString::hex)) + cell(newSignatures.v3.joinToString("\n", transform = ByteString::hex)) + } }
--- a/diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/SignaturesDiff.kt +++ b/diffuse/src/main/kotlin/com/jakewharton/diffuse/diff/SignaturesDiff.kt @@ -4,2 +4,3 @@ CON import com.jakewharton.diffuse.diffuseTable ADD import com.jakewharton.picnic.TextAlignment.TopRight CON import okio.ByteString @@ -17,18 +18,30 @@ CON header { DEL row("", "old", "new") ADD row("SIGNATURES", "old", "new") CON } CON if (oldSignatures.v1.isNotEmpty() || newSignatures.v1.isNotEmpty()) { DEL row("V1", DEL oldSignatures.v1.joinToString("\n", transform = ByteString::hex), DEL newSignatures.v1.joinToString("\n", transform = ByteString::hex)) ADD row { ADD cell("V1") { ADD alignment = TopRight ADD } ADD cell(oldSignatures.v1.joinToString("\n", transform = ByteString::hex)) ADD cell(newSignatures.v1.joinToString("\n", transform = ByteString::hex)) ADD } CON } CON if (oldSignatures.v2.isNotEmpty() || newSignatures.v2.isNotEmpty()) { DEL row("V2", DEL oldSignatures.v2.joinToString("\n", transform = ByteString::hex), DEL newSignatures.v2.joinToString("\n", transform = ByteString::hex)) ADD row { ADD cell("V2") { ADD alignment = TopRight ADD } ADD cell(oldSignatures.v2.joinToString("\n", transform = ByteString::hex)) ADD cell(newSignatures.v2.joinToString("\n", transform = ByteString::hex)) ADD } CON } CON if (oldSignatures.v3.isNotEmpty() || newSignatures.v3.isNotEmpty()) { DEL row("V3", DEL oldSignatures.v3.joinToString("\n", transform = ByteString::hex), DEL newSignatures.v3.joinToString("\n", transform = ByteString::hex)) ADD row { ADD cell("V3") { ADD alignment = TopRight ADD } ADD cell(oldSignatures.v3.joinToString("\n", transform = ByteString::hex)) ADD cell(newSignatures.v3.joinToString("\n", transform = ByteString::hex)) ADD } CON }
<<<<<<< SEARCH import com.jakewharton.diffuse.Signatures import com.jakewharton.diffuse.diffuseTable import okio.ByteString ======= import com.jakewharton.diffuse.Signatures import com.jakewharton.diffuse.diffuseTable import com.jakewharton.picnic.TextAlignment.TopRight import okio.ByteString >>>>>>> REPLACE <<<<<<< SEARCH appendln(diffuseTable { header { row("", "old", "new") } if (oldSignatures.v1.isNotEmpty() || newSignatures.v1.isNotEmpty()) { row("V1", oldSignatures.v1.joinToString("\n", transform = ByteString::hex), newSignatures.v1.joinToString("\n", transform = ByteString::hex)) } if (oldSignatures.v2.isNotEmpty() || newSignatures.v2.isNotEmpty()) { row("V2", oldSignatures.v2.joinToString("\n", transform = ByteString::hex), newSignatures.v2.joinToString("\n", transform = ByteString::hex)) } if (oldSignatures.v3.isNotEmpty() || newSignatures.v3.isNotEmpty()) { row("V3", oldSignatures.v3.joinToString("\n", transform = ByteString::hex), newSignatures.v3.joinToString("\n", transform = ByteString::hex)) } }) ======= appendln(diffuseTable { header { row("SIGNATURES", "old", "new") } if (oldSignatures.v1.isNotEmpty() || newSignatures.v1.isNotEmpty()) { row { cell("V1") { alignment = TopRight } cell(oldSignatures.v1.joinToString("\n", transform = ByteString::hex)) cell(newSignatures.v1.joinToString("\n", transform = ByteString::hex)) } } if (oldSignatures.v2.isNotEmpty() || newSignatures.v2.isNotEmpty()) { row { cell("V2") { alignment = TopRight } cell(oldSignatures.v2.joinToString("\n", transform = ByteString::hex)) cell(newSignatures.v2.joinToString("\n", transform = ByteString::hex)) } } if (oldSignatures.v3.isNotEmpty() || newSignatures.v3.isNotEmpty()) { row { cell("V3") { alignment = TopRight } cell(oldSignatures.v3.joinToString("\n", transform = ByteString::hex)) cell(newSignatures.v3.joinToString("\n", transform = ByteString::hex)) } } }) >>>>>>> REPLACE
google/android-fhir
aab389a177fa93ed6772c640ff38eebf6b3efeb9
datacapture/src/main/java/com/google/android/fhir/datacapture/MoreQuestionnaireItemExtensions.kt
kotlin
apache-2.0
Fix test case to make sure item control extension code is part of supported codes
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.fhir.datacapture import com.google.fhir.r4.core.Questionnaire internal const val ITEM_CONTROL_DROP_DOWN = "drop-down" internal const val EXTENSION_ITEM_CONTROL_URL = "http://hl7.org/fhir/StructureDefinition/questionnaire-itemControl" internal const val EXTENSION_ITEM_CONTROL_SYSTEM = "http://hl7.org/fhir/questionnaire-item-control" // Item control code as string or null internal val Questionnaire.Item.itemControl: String? get() { this.extensionList.forEach { if (it.url.value.equals(EXTENSION_ITEM_CONTROL_URL)) { it.value.codeableConcept.codingList.forEach { if (it.system.value.equals(EXTENSION_ITEM_CONTROL_SYSTEM)) { return it.code.value } } } } return null }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.fhir.datacapture import com.google.fhir.r4.core.Questionnaire internal const val ITEM_CONTROL_DROP_DOWN = "drop-down" internal const val EXTENSION_ITEM_CONTROL_URL = "http://hl7.org/fhir/StructureDefinition/questionnaire-itemControl" internal const val EXTENSION_ITEM_CONTROL_SYSTEM = "http://hl7.org/fhir/questionnaire-item-control" // Item control code as string or null internal val Questionnaire.Item.itemControl: String? get() { this.extensionList.forEach { if (it.url.value.equals(EXTENSION_ITEM_CONTROL_URL)) { it.value.codeableConcept.codingList.forEach { if (it.system.value.equals(EXTENSION_ITEM_CONTROL_SYSTEM)) { return when (it.code.value) { ITEM_CONTROL_DROP_DOWN -> ITEM_CONTROL_DROP_DOWN else -> null } } } } } return null }
4
1
1
mixed
--- a/datacapture/src/main/java/com/google/android/fhir/datacapture/MoreQuestionnaireItemExtensions.kt +++ b/datacapture/src/main/java/com/google/android/fhir/datacapture/MoreQuestionnaireItemExtensions.kt @@ -33,3 +33,6 @@ if (it.system.value.equals(EXTENSION_ITEM_CONTROL_SYSTEM)) { - return it.code.value + return when (it.code.value) { + ITEM_CONTROL_DROP_DOWN -> ITEM_CONTROL_DROP_DOWN + else -> null + } }
--- a/datacapture/src/main/java/com/google/android/fhir/datacapture/MoreQuestionnaireItemExtensions.kt +++ b/datacapture/src/main/java/com/google/android/fhir/datacapture/MoreQuestionnaireItemExtensions.kt @@ ... @@ if (it.system.value.equals(EXTENSION_ITEM_CONTROL_SYSTEM)) { - return it.code.value + return when (it.code.value) { + ITEM_CONTROL_DROP_DOWN -> ITEM_CONTROL_DROP_DOWN + else -> null + } }
--- a/datacapture/src/main/java/com/google/android/fhir/datacapture/MoreQuestionnaireItemExtensions.kt +++ b/datacapture/src/main/java/com/google/android/fhir/datacapture/MoreQuestionnaireItemExtensions.kt @@ -33,3 +33,6 @@ CON if (it.system.value.equals(EXTENSION_ITEM_CONTROL_SYSTEM)) { DEL return it.code.value ADD return when (it.code.value) { ADD ITEM_CONTROL_DROP_DOWN -> ITEM_CONTROL_DROP_DOWN ADD else -> null ADD } CON }
<<<<<<< SEARCH it.value.codeableConcept.codingList.forEach { if (it.system.value.equals(EXTENSION_ITEM_CONTROL_SYSTEM)) { return it.code.value } } ======= it.value.codeableConcept.codingList.forEach { if (it.system.value.equals(EXTENSION_ITEM_CONTROL_SYSTEM)) { return when (it.code.value) { ITEM_CONTROL_DROP_DOWN -> ITEM_CONTROL_DROP_DOWN else -> null } } } >>>>>>> REPLACE
Ruben-Sten/TeXiFy-IDEA
bc680b2d0d88196c81d362c6c9c8a3f8af53e0a7
src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt
kotlin
mit
Remove check which does not do forward search in preamble because this is unnecessary (forward search works fine) and behaves incorrectly (at least in one case).
package nl.rubensten.texifyidea.run.evince

import com.intellij.execution.runners.ExecutionEnvironment
import nl.rubensten.texifyidea.TeXception
import nl.rubensten.texifyidea.psi.LatexEnvironment
import nl.rubensten.texifyidea.run.LatexRunConfiguration
import nl.rubensten.texifyidea.util.*
import org.jetbrains.concurrency.runAsync

/**
 * Provides forward search for Evince.
 */
class EvinceForwardSearch {

    /**
     * Execute forward search based on the given environment.
     */
    fun execute(runConfig: LatexRunConfiguration, environment: ExecutionEnvironment) {
        run {
            val psiFile = runConfig.mainFile.psiFile(environment.project) ?: return@run
            val document = psiFile.document() ?: return@run
            val editor = psiFile.openedEditor() ?: return@run

            if (document != editor.document) {
                return@run
            }

            // Do not do forward search when editing the preamble.
            if (psiFile.isRoot()) {
                val element = psiFile.findElementAt(editor.caretOffset()) ?: return@run
                val environment = element.parentOfType(LatexEnvironment::class) ?: return@run
                if (environment.name()?.text != "document") {
                    return@run
                }
            }

            val line = document.getLineNumber(editor.caretOffset()) + 1

            runAsync {
                try {
                    // This will start Evince if it is not running yet
                    EvinceConversation.forwardSearch(pdfFilePath = runConfig.outputFilePath, sourceFilePath = psiFile.virtualFile.path, line = line)
                } catch (ignored: TeXception) {
                }
            }
        }
    }
}
package nl.rubensten.texifyidea.run.evince

import com.intellij.execution.runners.ExecutionEnvironment
import nl.rubensten.texifyidea.TeXception
import nl.rubensten.texifyidea.psi.LatexEnvironment
import nl.rubensten.texifyidea.run.LatexRunConfiguration
import nl.rubensten.texifyidea.util.*
import org.jetbrains.concurrency.runAsync

/**
 * Provides forward search for Evince.
 */
class EvinceForwardSearch {

    /**
     * Execute forward search based on the given environment.
     */
    fun execute(runConfig: LatexRunConfiguration, environment: ExecutionEnvironment) {
        run {
            val psiFile = runConfig.mainFile.psiFile(environment.project) ?: return@run
            val document = psiFile.document() ?: return@run
            val editor = psiFile.openedEditor() ?: return@run

            if (document != editor.document) {
                return@run
            }

            val line = document.getLineNumber(editor.caretOffset()) + 1

            runAsync {
                try {
                    // This will start Evince if it is not running yet
                    EvinceConversation.forwardSearch(pdfFilePath = runConfig.outputFilePath, sourceFilePath = psiFile.virtualFile.path, line = line)
                } catch (ignored: TeXception) {
                }
            }
        }
    }
}
0
9
1
del_only
--- a/src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt +++ b/src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt @@ -27,11 +27,2 @@ - // Do not do forward search when editing the preamble. - if (psiFile.isRoot()) { - val element = psiFile.findElementAt(editor.caretOffset()) ?: return@run - val environment = element.parentOfType(LatexEnvironment::class) ?: return@run - if (environment.name()?.text != "document") { - return@run - } - } - val line = document.getLineNumber(editor.caretOffset()) + 1
--- a/src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt +++ b/src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt @@ ... @@ - // Do not do forward search when editing the preamble. - if (psiFile.isRoot()) { - val element = psiFile.findElementAt(editor.caretOffset()) ?: return@run - val environment = element.parentOfType(LatexEnvironment::class) ?: return@run - if (environment.name()?.text != "document") { - return@run - } - } - val line = document.getLineNumber(editor.caretOffset()) + 1
--- a/src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt +++ b/src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt @@ -27,11 +27,2 @@ CON DEL // Do not do forward search when editing the preamble. DEL if (psiFile.isRoot()) { DEL val element = psiFile.findElementAt(editor.caretOffset()) ?: return@run DEL val environment = element.parentOfType(LatexEnvironment::class) ?: return@run DEL if (environment.name()?.text != "document") { DEL return@run DEL } DEL } DEL CON val line = document.getLineNumber(editor.caretOffset()) + 1
<<<<<<< SEARCH } // Do not do forward search when editing the preamble. if (psiFile.isRoot()) { val element = psiFile.findElementAt(editor.caretOffset()) ?: return@run val environment = element.parentOfType(LatexEnvironment::class) ?: return@run if (environment.name()?.text != "document") { return@run } } val line = document.getLineNumber(editor.caretOffset()) + 1 ======= } val line = document.getLineNumber(editor.caretOffset()) + 1 >>>>>>> REPLACE
Abica/piston-examples
a0b048a0d8df16b657f0195b2096fac5d7a13540
image/src/main.rs
rust
mit
Fix old_path import in image example
#![feature(old_path)]

extern crate piston;
extern crate graphics;
extern crate sdl2_window;
extern crate opengl_graphics;

use std::cell::RefCell;
use opengl_graphics::{
    GlGraphics,
    OpenGL,
    Texture,
};
use sdl2_window::Sdl2Window;

fn main() {
    let opengl = OpenGL::_3_2;
    let window = Sdl2Window::new(
        opengl,
        piston::window::WindowSettings {
            title: "Image".to_string(),
            size: [300, 300],
            fullscreen: false,
            exit_on_esc: true,
            samples: 0,
        }
    );

    let image = Path::new("./bin/assets/rust-logo.png");
    let image = Texture::from_path(&image).unwrap();
    let ref mut gl = GlGraphics::new(opengl);
    let window = RefCell::new(window);
    for e in piston::events(&window) {
        use piston::event::RenderEvent;

        if let Some(args) = e.render_args() {
            use graphics::*;
            gl.draw([0, 0, args.width as i32, args.height as i32], |c, gl| {
                graphics::clear([1.0; 4], gl);
                graphics::image(&image, c.transform, gl);
            });
        };
    }
}
#![feature(old_path)]

extern crate piston;
extern crate graphics;
extern crate sdl2_window;
extern crate opengl_graphics;

use std::old_path::*;

use std::cell::RefCell;
use opengl_graphics::{
    GlGraphics,
    OpenGL,
    Texture,
};
use sdl2_window::Sdl2Window;

fn main() {
    let opengl = OpenGL::_3_2;
    let window = Sdl2Window::new(
        opengl,
        piston::window::WindowSettings {
            title: "Image".to_string(),
            size: [300, 300],
            fullscreen: false,
            exit_on_esc: true,
            samples: 0,
        }
    );

    let rust_logo = Path::new("./bin/assets/rust-logo.png");
    let rust_logo = Texture::from_path(&rust_logo).unwrap();
    let ref mut gl = GlGraphics::new(opengl);
    let window = RefCell::new(window);
    for e in piston::events(&window) {
        use piston::event::*;

        if let Some(args) = e.render_args() {
            use graphics::*;

            gl.draw([0, 0, args.width as i32, args.height as i32], |c, gl| {
                clear([1.0; 4], gl);
                image(&rust_logo, c.transform, gl);
            });
        };
    }
}
8
5
4
mixed
--- a/image/src/main.rs +++ b/image/src/main.rs @@ -6,2 +6,4 @@ extern crate opengl_graphics; + +use std::old_path::*; @@ -28,4 +30,4 @@ - let image = Path::new("./bin/assets/rust-logo.png"); - let image = Texture::from_path(&image).unwrap(); + let rust_logo = Path::new("./bin/assets/rust-logo.png"); + let rust_logo = Texture::from_path(&rust_logo).unwrap(); let ref mut gl = GlGraphics::new(opengl); @@ -33,3 +35,3 @@ for e in piston::events(&window) { - use piston::event::RenderEvent; + use piston::event::*; @@ -37,5 +39,6 @@ use graphics::*; + gl.draw([0, 0, args.width as i32, args.height as i32], |c, gl| { - graphics::clear([1.0; 4], gl); - graphics::image(&image, c.transform, gl); + clear([1.0; 4], gl); + image(&rust_logo, c.transform, gl); });
--- a/image/src/main.rs +++ b/image/src/main.rs @@ ... @@ extern crate opengl_graphics; + +use std::old_path::*; @@ ... @@ - let image = Path::new("./bin/assets/rust-logo.png"); - let image = Texture::from_path(&image).unwrap(); + let rust_logo = Path::new("./bin/assets/rust-logo.png"); + let rust_logo = Texture::from_path(&rust_logo).unwrap(); let ref mut gl = GlGraphics::new(opengl); @@ ... @@ for e in piston::events(&window) { - use piston::event::RenderEvent; + use piston::event::*; @@ ... @@ use graphics::*; + gl.draw([0, 0, args.width as i32, args.height as i32], |c, gl| { - graphics::clear([1.0; 4], gl); - graphics::image(&image, c.transform, gl); + clear([1.0; 4], gl); + image(&rust_logo, c.transform, gl); });
--- a/image/src/main.rs +++ b/image/src/main.rs @@ -6,2 +6,4 @@ CON extern crate opengl_graphics; ADD ADD use std::old_path::*; CON @@ -28,4 +30,4 @@ CON DEL let image = Path::new("./bin/assets/rust-logo.png"); DEL let image = Texture::from_path(&image).unwrap(); ADD let rust_logo = Path::new("./bin/assets/rust-logo.png"); ADD let rust_logo = Texture::from_path(&rust_logo).unwrap(); CON let ref mut gl = GlGraphics::new(opengl); @@ -33,3 +35,3 @@ CON for e in piston::events(&window) { DEL use piston::event::RenderEvent; ADD use piston::event::*; CON @@ -37,5 +39,6 @@ CON use graphics::*; ADD CON gl.draw([0, 0, args.width as i32, args.height as i32], |c, gl| { DEL graphics::clear([1.0; 4], gl); DEL graphics::image(&image, c.transform, gl); ADD clear([1.0; 4], gl); ADD image(&rust_logo, c.transform, gl); CON });
<<<<<<< SEARCH extern crate sdl2_window; extern crate opengl_graphics; use std::cell::RefCell; ======= extern crate sdl2_window; extern crate opengl_graphics; use std::old_path::*; use std::cell::RefCell; >>>>>>> REPLACE <<<<<<< SEARCH ); let image = Path::new("./bin/assets/rust-logo.png"); let image = Texture::from_path(&image).unwrap(); let ref mut gl = GlGraphics::new(opengl); let window = RefCell::new(window); for e in piston::events(&window) { use piston::event::RenderEvent; if let Some(args) = e.render_args() { use graphics::*; gl.draw([0, 0, args.width as i32, args.height as i32], |c, gl| { graphics::clear([1.0; 4], gl); graphics::image(&image, c.transform, gl); }); }; ======= ); let rust_logo = Path::new("./bin/assets/rust-logo.png"); let rust_logo = Texture::from_path(&rust_logo).unwrap(); let ref mut gl = GlGraphics::new(opengl); let window = RefCell::new(window); for e in piston::events(&window) { use piston::event::*; if let Some(args) = e.render_args() { use graphics::*; gl.draw([0, 0, args.width as i32, args.height as i32], |c, gl| { clear([1.0; 4], gl); image(&rust_logo, c.transform, gl); }); }; >>>>>>> REPLACE
smarturban/sstu_schedule
c076d54e4dd1200724cb5e28ebed2711af466838
src/main/java/com/ssu/schedule/model/Day.java
java
apache-2.0
fix(date): Update date for new study year
package com.ssu.schedule.model;

public class Day {
    private String start = "01.09.2016";
    private String end = "01.07.2017";
    private int weekday;
    private int week = 0;

    public void setWeekday(int weekday) {
        this.weekday = weekday;
    }

    public void setWeek(int week) {
        this.week = week;
    }

    public int getWeekday() {
        return weekday;
    }

    public int getWeek() {
        return week;
    }

    public String getStart() {
        return start;
    }

    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    public void setEnd(String end) {
        this.end = end;
    }
}
package com.ssu.schedule.model;

public class Day {
    private String start = "01.09.2017";
    private String end = "01.07.2018";
    private int weekday;
    private int week = 0;

    public void setWeekday(int weekday) {
        this.weekday = weekday;
    }

    public void setWeek(int week) {
        this.week = week;
    }

    public int getWeekday() {
        return weekday;
    }

    public int getWeek() {
        return week;
    }

    public String getStart() {
        return start;
    }

    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    public void setEnd(String end) {
        this.end = end;
    }
}
2
2
1
mixed
--- a/src/main/java/com/ssu/schedule/model/Day.java +++ b/src/main/java/com/ssu/schedule/model/Day.java @@ -3,4 +3,4 @@ public class Day { - private String start = "01.09.2016"; - private String end = "01.07.2017"; + private String start = "01.09.2017"; + private String end = "01.07.2018"; private int weekday;
--- a/src/main/java/com/ssu/schedule/model/Day.java +++ b/src/main/java/com/ssu/schedule/model/Day.java @@ ... @@ public class Day { - private String start = "01.09.2016"; - private String end = "01.07.2017"; + private String start = "01.09.2017"; + private String end = "01.07.2018"; private int weekday;
--- a/src/main/java/com/ssu/schedule/model/Day.java +++ b/src/main/java/com/ssu/schedule/model/Day.java @@ -3,4 +3,4 @@ CON public class Day { DEL private String start = "01.09.2016"; DEL private String end = "01.07.2017"; ADD private String start = "01.09.2017"; ADD private String end = "01.07.2018"; CON private int weekday;
<<<<<<< SEARCH public class Day { private String start = "01.09.2016"; private String end = "01.07.2017"; private int weekday; ======= public class Day { private String start = "01.09.2017"; private String end = "01.07.2018"; private int weekday; >>>>>>> REPLACE
inejge/ldap3
3c58b124a8ad6b84a3082844fa33833cd6fc3c82
protocol/src/lib.rs
rust
apache-2.0
Use if let because why not
extern crate byteorder;

pub mod ber;
pub mod error;

use ber::common;
use ber::types::ASNType;

pub type Result<T> = std::result::Result<T, error::LDAPError>;

pub fn build_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag
{
    let msgidtag = msgid.into_ber_universal();

    let plvec = if controls.is_some() {
        vec![msgidtag, protocolOp, controls.unwrap()] }
    else {
        vec![msgidtag, protocolOp]
    };

    plvec.into_ber_universal()
}

pub fn unwrap_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)>
{
    let common::Tag { _value, .. } = envelope;

    let mut tagvec = match _value {
        common::Payload::Constructed(e) => e,
        common::Payload::Primitive(_) => { return Err(error::LDAPError::Protocol) },
    };

    if tagvec.len() < 3 || tagvec.len() > 2 {
        return Err(error::LDAPError::Protocol)
    }

    let mut msgidtag = tagvec.pop().unwrap();
    let protocolOp = tagvec.pop().unwrap();
    let controls = tagvec.pop();

    let msgid = match i32::from_tag(&mut msgidtag) {
        Some(e) => e,
        None => return Err(error::LDAPError::Protocol),
    };

    Ok((msgid, protocolOp, controls))
}
extern crate byteorder;

pub mod ber;
pub mod error;

use ber::common;
use ber::types::ASNType;

pub type Result<T> = std::result::Result<T, error::LDAPError>;

pub fn construct_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag
{
    let msgidtag = msgid.into_ber_universal();

    let plvec = if let Some(controls) = controls {
        vec![msgidtag, protocolOp, controls]
    }
    else {
        vec![msgidtag, protocolOp]
    };

    plvec.into_ber_universal()
}

pub fn deconstruct_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)>
{
    let common::Tag { _value, .. } = envelope;

    let mut tagvec = match _value {
        common::Payload::Constructed(e) => e,
        common::Payload::Primitive(_) => { return Err(error::LDAPError::Protocol) },
    };

    if tagvec.len() < 3 || tagvec.len() > 2 {
        return Err(error::LDAPError::Protocol)
    }

    let mut msgidtag = tagvec.pop().unwrap();
    let protocolOp = tagvec.pop().unwrap();
    let controls = tagvec.pop();

    let msgid = match i32::from_tag(&mut msgidtag) {
        Some(e) => e,
        None => return Err(error::LDAPError::Protocol),
    };

    Ok((msgid, protocolOp, controls))
}
5
4
3
mixed
--- a/protocol/src/lib.rs +++ b/protocol/src/lib.rs @@ -10,3 +10,3 @@ -pub fn build_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag +pub fn construct_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag { @@ -15,4 +15,5 @@ - let plvec = if controls.is_some() { - vec![msgidtag, protocolOp, controls.unwrap()] } + let plvec = if let Some(controls) = controls { + vec![msgidtag, protocolOp, controls] + } else { @@ -24,3 +25,3 @@ -pub fn unwrap_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> +pub fn deconstruct_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> {
--- a/protocol/src/lib.rs +++ b/protocol/src/lib.rs @@ ... @@ -pub fn build_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag +pub fn construct_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag { @@ ... @@ - let plvec = if controls.is_some() { - vec![msgidtag, protocolOp, controls.unwrap()] } + let plvec = if let Some(controls) = controls { + vec![msgidtag, protocolOp, controls] + } else { @@ ... @@ -pub fn unwrap_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> +pub fn deconstruct_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> {
--- a/protocol/src/lib.rs +++ b/protocol/src/lib.rs @@ -10,3 +10,3 @@ CON DEL pub fn build_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag ADD pub fn construct_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag CON { @@ -15,4 +15,5 @@ CON DEL let plvec = if controls.is_some() { DEL vec![msgidtag, protocolOp, controls.unwrap()] } ADD let plvec = if let Some(controls) = controls { ADD vec![msgidtag, protocolOp, controls] ADD } CON else { @@ -24,3 +25,3 @@ CON DEL pub fn unwrap_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> ADD pub fn deconstruct_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> CON {
<<<<<<< SEARCH pub type Result<T> = std::result::Result<T, error::LDAPError>; pub fn build_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag { let msgidtag = msgid.into_ber_universal(); let plvec = if controls.is_some() { vec![msgidtag, protocolOp, controls.unwrap()] } else { vec![msgidtag, protocolOp] ======= pub type Result<T> = std::result::Result<T, error::LDAPError>; pub fn construct_envelope(msgid: i32, protocolOp: common::Tag, controls: Option<common::Tag>) -> common::Tag { let msgidtag = msgid.into_ber_universal(); let plvec = if let Some(controls) = controls { vec![msgidtag, protocolOp, controls] } else { vec![msgidtag, protocolOp] >>>>>>> REPLACE <<<<<<< SEARCH } pub fn unwrap_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> { let common::Tag { _value, .. } = envelope; ======= } pub fn deconstruct_envelope(envelope: common::Tag) -> Result<(i32, common::Tag, Option<common::Tag>)> { let common::Tag { _value, .. } = envelope; >>>>>>> REPLACE
MitocGroup/aws-sdk-js
225d94b2aa021213165d5780118290443a1649a1
lib/services/cloudsearchdomain.js
javascript
apache-2.0
Add constructor and customized class documentation
var AWS = require('../core');

/**
 * Note: the `AWS.CloudSearchDomain` constructor must be created with a
 * valid endpoint.
 */
AWS.util.update(AWS.CloudSearchDomain.prototype, {
  /**
   * @api private
   */
  validateService: function validateService() {
    if (!this.config.endpoint || this.config.endpoint.indexOf('{') >= 0) {
      var msg = 'AWS.CloudSearchDomain requires an explicit '+
        '`endpoint\' configuration option.';
      throw AWS.util.error(new Error(),
        {name: 'InvalidEndpoint', message: msg});
    }
  }
});
var AWS = require('../core');

/**
 * Constructs a service interface object. Each API operation is exposed as a
 * function on service.
 *
 * ### Sending a Request Using CloudSearchDomain
 *
 * ```javascript
 * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'});
 * csd.search(params, function (err, data) {
 *   if (err) console.log(err, err.stack); // an error occurred
 *   else console.log(data); // successful response
 * });
 * ```
 *
 * ### Locking the API Version
 *
 * In order to ensure that the CloudSearchDomain object uses this specific API,
 * you can construct the object by passing the `apiVersion` option to the
 * constructor:
 *
 * ```javascript
 * var csd = new AWS.CloudSearchDomain({
 *   endpoint: 'my.host.tld',
 *   apiVersion: '2013-01-01'
 * });
 * ```
 *
 * You can also set the API version globally in `AWS.config.apiVersions` using
 * the **cloudsearchdomain** service identifier:
 *
 * ```javascript
 * AWS.config.apiVersions = {
 *   cloudsearchdomain: '2013-01-01',
 *   // other service API versions
 * };
 *
 * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'});
 * ```
 *
 * @note You *must* provide an `endpoint` configuration parameter when
 *   constructing this service. See {constructor} for more information.
 *
 * @!method constructor(options = {})
 *   Constructs a service object. This object has one method for each
 *   API operation.
 *
 *   @example Constructing a CloudSearchDomain object
 *     var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'});
 *   @note You *must* provide an `endpoint` when constructing this service.
 *   @option (see AWS.Config.constructor)
 *
 * @service cloudsearchdomain
 * @version 2013-01-01
 */
AWS.util.update(AWS.CloudSearchDomain.prototype, {
  /**
   * @api private
   */
  validateService: function validateService() {
    if (!this.config.endpoint || this.config.endpoint.indexOf('{') >= 0) {
      var msg = 'AWS.CloudSearchDomain requires an explicit '+
        '`endpoint\' configuration option.';
      throw AWS.util.error(new Error(),
        {name: 'InvalidEndpoint', message: msg});
    }
  }
});
52
2
1
mixed
--- a/lib/services/cloudsearchdomain.js +++ b/lib/services/cloudsearchdomain.js @@ -3,4 +3,54 @@ /** - * Note: the `AWS.CloudSearchDomain` constructor must be created with a - * valid endpoint. + * Constructs a service interface object. Each API operation is exposed as a + * function on service. + * + * ### Sending a Request Using CloudSearchDomain + * + * ```javascript + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * csd.search(params, function (err, data) { + * if (err) console.log(err, err.stack); // an error occurred + * else console.log(data); // successful response + * }); + * ``` + * + * ### Locking the API Version + * + * In order to ensure that the CloudSearchDomain object uses this specific API, + * you can construct the object by passing the `apiVersion` option to the + * constructor: + * + * ```javascript + * var csd = new AWS.CloudSearchDomain({ + * endpoint: 'my.host.tld', + * apiVersion: '2013-01-01' + * }); + * ``` + * + * You can also set the API version globally in `AWS.config.apiVersions` using + * the **cloudsearchdomain** service identifier: + * + * ```javascript + * AWS.config.apiVersions = { + * cloudsearchdomain: '2013-01-01', + * // other service API versions + * }; + * + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * ``` + * + * @note You *must* provide an `endpoint` configuration parameter when + * constructing this service. See {constructor} for more information. + * + * @!method constructor(options = {}) + * Constructs a service object. This object has one method for each + * API operation. + * + * @example Constructing a CloudSearchDomain object + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * @note You *must* provide an `endpoint` when constructing this service. + * @option (see AWS.Config.constructor) + * + * @service cloudsearchdomain + * @version 2013-01-01 */
--- a/lib/services/cloudsearchdomain.js +++ b/lib/services/cloudsearchdomain.js @@ ... @@ /** - * Note: the `AWS.CloudSearchDomain` constructor must be created with a - * valid endpoint. + * Constructs a service interface object. Each API operation is exposed as a + * function on service. + * + * ### Sending a Request Using CloudSearchDomain + * + * ```javascript + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * csd.search(params, function (err, data) { + * if (err) console.log(err, err.stack); // an error occurred + * else console.log(data); // successful response + * }); + * ``` + * + * ### Locking the API Version + * + * In order to ensure that the CloudSearchDomain object uses this specific API, + * you can construct the object by passing the `apiVersion` option to the + * constructor: + * + * ```javascript + * var csd = new AWS.CloudSearchDomain({ + * endpoint: 'my.host.tld', + * apiVersion: '2013-01-01' + * }); + * ``` + * + * You can also set the API version globally in `AWS.config.apiVersions` using + * the **cloudsearchdomain** service identifier: + * + * ```javascript + * AWS.config.apiVersions = { + * cloudsearchdomain: '2013-01-01', + * // other service API versions + * }; + * + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * ``` + * + * @note You *must* provide an `endpoint` configuration parameter when + * constructing this service. See {constructor} for more information. + * + * @!method constructor(options = {}) + * Constructs a service object. This object has one method for each + * API operation. + * + * @example Constructing a CloudSearchDomain object + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * @note You *must* provide an `endpoint` when constructing this service. + * @option (see AWS.Config.constructor) + * + * @service cloudsearchdomain + * @version 2013-01-01 */
--- a/lib/services/cloudsearchdomain.js +++ b/lib/services/cloudsearchdomain.js @@ -3,4 +3,54 @@ CON /** DEL * Note: the `AWS.CloudSearchDomain` constructor must be created with a DEL * valid endpoint. ADD * Constructs a service interface object. Each API operation is exposed as a ADD * function on service. ADD * ADD * ### Sending a Request Using CloudSearchDomain ADD * ADD * ```javascript ADD * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); ADD * csd.search(params, function (err, data) { ADD * if (err) console.log(err, err.stack); // an error occurred ADD * else console.log(data); // successful response ADD * }); ADD * ``` ADD * ADD * ### Locking the API Version ADD * ADD * In order to ensure that the CloudSearchDomain object uses this specific API, ADD * you can construct the object by passing the `apiVersion` option to the ADD * constructor: ADD * ADD * ```javascript ADD * var csd = new AWS.CloudSearchDomain({ ADD * endpoint: 'my.host.tld', ADD * apiVersion: '2013-01-01' ADD * }); ADD * ``` ADD * ADD * You can also set the API version globally in `AWS.config.apiVersions` using ADD * the **cloudsearchdomain** service identifier: ADD * ADD * ```javascript ADD * AWS.config.apiVersions = { ADD * cloudsearchdomain: '2013-01-01', ADD * // other service API versions ADD * }; ADD * ADD * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); ADD * ``` ADD * ADD * @note You *must* provide an `endpoint` configuration parameter when ADD * constructing this service. See {constructor} for more information. ADD * ADD * @!method constructor(options = {}) ADD * Constructs a service object. This object has one method for each ADD * API operation. ADD * ADD * @example Constructing a CloudSearchDomain object ADD * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); ADD * @note You *must* provide an `endpoint` when constructing this service. ADD * @option (see AWS.Config.constructor) ADD * ADD * @service cloudsearchdomain ADD * @version 2013-01-01 CON */
<<<<<<< SEARCH /** * Note: the `AWS.CloudSearchDomain` constructor must be created with a * valid endpoint. */ AWS.util.update(AWS.CloudSearchDomain.prototype, { ======= /** * Constructs a service interface object. Each API operation is exposed as a * function on service. * * ### Sending a Request Using CloudSearchDomain * * ```javascript * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); * csd.search(params, function (err, data) { * if (err) console.log(err, err.stack); // an error occurred * else console.log(data); // successful response * }); * ``` * * ### Locking the API Version * * In order to ensure that the CloudSearchDomain object uses this specific API, * you can construct the object by passing the `apiVersion` option to the * constructor: * * ```javascript * var csd = new AWS.CloudSearchDomain({ * endpoint: 'my.host.tld', * apiVersion: '2013-01-01' * }); * ``` * * You can also set the API version globally in `AWS.config.apiVersions` using * the **cloudsearchdomain** service identifier: * * ```javascript * AWS.config.apiVersions = { * cloudsearchdomain: '2013-01-01', * // other service API versions * }; * * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); * ``` * * @note You *must* provide an `endpoint` configuration parameter when * constructing this service. See {constructor} for more information. * * @!method constructor(options = {}) * Constructs a service object. This object has one method for each * API operation. * * @example Constructing a CloudSearchDomain object * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); * @note You *must* provide an `endpoint` when constructing this service. * @option (see AWS.Config.constructor) * * @service cloudsearchdomain * @version 2013-01-01 */ AWS.util.update(AWS.CloudSearchDomain.prototype, { >>>>>>> REPLACE
homeworkprod/byceps
11fd3e5c8a2c7f591dff1ba1949508d178d1e5d5
byceps/util/irc.py
python
bsd-3-clause
Make IRC message delay configurable
""" byceps.util.irc ~~~~~~~~~~~~~~~ Send IRC messages to a bot via HTTP. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from time import sleep from typing import List from flask import current_app import requests DEFAULT_BOT_URL = 'http://127.0.0.1:12345/' DEFAULT_ENABLED = False DELAY_IN_SECONDS = 2 DEFAULT_TEXT_PREFIX = '[BYCEPS] ' def send_message(channels: List[str], text: str) -> None: """Write the text to the channels by sending it to the bot via HTTP.""" enabled = current_app.config.get('ANNOUNCE_IRC_ENABLED', DEFAULT_ENABLED) if not enabled: current_app.logger.warning('Announcements on IRC are disabled.') return text_prefix = current_app.config.get( 'ANNOUNCE_IRC_TEXT_PREFIX', DEFAULT_TEXT_PREFIX ) text = text_prefix + text url = current_app.config.get('IRC_BOT_URL', DEFAULT_BOT_URL) data = {'channels': channels, 'text': text} # Delay a bit as an attempt to avoid getting kicked from server # because of flooding. sleep(DELAY_IN_SECONDS) requests.post(url, json=data) # Ignore response code for now.
""" byceps.util.irc ~~~~~~~~~~~~~~~ Send IRC messages to a bot via HTTP. :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from time import sleep from typing import List from flask import current_app import requests DEFAULT_BOT_URL = 'http://127.0.0.1:12345/' DEFAULT_ENABLED = False DEFAULT_DELAY_IN_SECONDS = 2 DEFAULT_TEXT_PREFIX = '[BYCEPS] ' def send_message(channels: List[str], text: str) -> None: """Write the text to the channels by sending it to the bot via HTTP.""" enabled = current_app.config.get('ANNOUNCE_IRC_ENABLED', DEFAULT_ENABLED) if not enabled: current_app.logger.warning('Announcements on IRC are disabled.') return text_prefix = current_app.config.get( 'ANNOUNCE_IRC_TEXT_PREFIX', DEFAULT_TEXT_PREFIX ) text = text_prefix + text url = current_app.config.get('IRC_BOT_URL', DEFAULT_BOT_URL) data = {'channels': channels, 'text': text} # Delay a bit as an attempt to avoid getting kicked from server # because of flooding. delay = int( current_app.config.get('ANNOUNCE_IRC_DELAY', DEFAULT_DELAY_IN_SECONDS) ) sleep(delay) requests.post(url, json=data) # Ignore response code for now.
5
4
2
mixed
--- a/byceps/util/irc.py +++ b/byceps/util/irc.py @@ -19,5 +19,3 @@ DEFAULT_ENABLED = False - -DELAY_IN_SECONDS = 2 - +DEFAULT_DELAY_IN_SECONDS = 2 DEFAULT_TEXT_PREFIX = '[BYCEPS] ' @@ -43,3 +41,6 @@ # because of flooding. - sleep(DELAY_IN_SECONDS) + delay = int( + current_app.config.get('ANNOUNCE_IRC_DELAY', DEFAULT_DELAY_IN_SECONDS) + ) + sleep(delay)
--- a/byceps/util/irc.py +++ b/byceps/util/irc.py @@ ... @@ DEFAULT_ENABLED = False - -DELAY_IN_SECONDS = 2 - +DEFAULT_DELAY_IN_SECONDS = 2 DEFAULT_TEXT_PREFIX = '[BYCEPS] ' @@ ... @@ # because of flooding. - sleep(DELAY_IN_SECONDS) + delay = int( + current_app.config.get('ANNOUNCE_IRC_DELAY', DEFAULT_DELAY_IN_SECONDS) + ) + sleep(delay)
--- a/byceps/util/irc.py +++ b/byceps/util/irc.py @@ -19,5 +19,3 @@ CON DEFAULT_ENABLED = False DEL DEL DELAY_IN_SECONDS = 2 DEL ADD DEFAULT_DELAY_IN_SECONDS = 2 CON DEFAULT_TEXT_PREFIX = '[BYCEPS] ' @@ -43,3 +41,6 @@ CON # because of flooding. DEL sleep(DELAY_IN_SECONDS) ADD delay = int( ADD current_app.config.get('ANNOUNCE_IRC_DELAY', DEFAULT_DELAY_IN_SECONDS) ADD ) ADD sleep(delay) CON
<<<<<<< SEARCH DEFAULT_BOT_URL = 'http://127.0.0.1:12345/' DEFAULT_ENABLED = False DELAY_IN_SECONDS = 2 DEFAULT_TEXT_PREFIX = '[BYCEPS] ' ======= DEFAULT_BOT_URL = 'http://127.0.0.1:12345/' DEFAULT_ENABLED = False DEFAULT_DELAY_IN_SECONDS = 2 DEFAULT_TEXT_PREFIX = '[BYCEPS] ' >>>>>>> REPLACE <<<<<<< SEARCH # Delay a bit as an attempt to avoid getting kicked from server # because of flooding. sleep(DELAY_IN_SECONDS) requests.post(url, json=data) # Ignore response code for now. ======= # Delay a bit as an attempt to avoid getting kicked from server # because of flooding. delay = int( current_app.config.get('ANNOUNCE_IRC_DELAY', DEFAULT_DELAY_IN_SECONDS) ) sleep(delay) requests.post(url, json=data) # Ignore response code for now. >>>>>>> REPLACE
superdesk/liveblog
c52a8db8276e1f6c49c427bb23547e3f65e7f257
client/app/scripts/liveblog-syndication/directives/incoming-syndication.js
javascript
agpl-3.0
Remove amount of round trip to the server
liveblogSyndication
    .directive('lbIncomingSyndication', ['$routeParams', 'IncomingSyndicationActions',
        'IncomingSyndicationReducers', 'Store',
        function($routeParams, IncomingSyndicationActions, IncomingSyndicationReducers, Store) {
            return {
                templateUrl: 'scripts/liveblog-syndication/views/incoming-syndication.html',
                scope: {
                    lbPostsOnPostSelected: '=',
                    openPanel: '=',
                    syndId: '='
                },
                link: function(scope) {
                    scope.blogId = $routeParams._id;

                    scope.store = new Store(IncomingSyndicationReducers, {
                        posts: {},
                        syndication: {}
                    });

                    scope.store.connect(function(state) {
                        scope.posts = state.posts;
                        scope.syndication = state.syndication;
                    });

                    scope.goBack = function() {
                        scope.openPanel('ingest', null);
                    };

                    IncomingSyndicationActions
                        .getPosts(scope.blogId, scope.syndId);

                    IncomingSyndicationActions
                        .getSyndication(scope.syndId);

                    // On incoming post, we reload all the posts.
                    // Not very fast, but easy to setup
                    scope.$on('posts', function() {
                        IncomingSyndicationActions
                            .getPosts(scope.blogId, scope.syndId);
                    });

                    scope.$on('$destroy', scope.store.destroy);
                }
            };
        }]);
liveblogSyndication
    .directive('lbIncomingSyndication', ['$routeParams', 'IncomingSyndicationActions',
        'IncomingSyndicationReducers', 'Store',
        function($routeParams, IncomingSyndicationActions, IncomingSyndicationReducers, Store) {
            return {
                templateUrl: 'scripts/liveblog-syndication/views/incoming-syndication.html',
                scope: {
                    lbPostsOnPostSelected: '=',
                    openPanel: '=',
                    syndId: '='
                },
                link: function(scope) {
                    scope.blogId = $routeParams._id;

                    scope.store = new Store(IncomingSyndicationReducers, {
                        posts: {},
                        syndication: {}
                    });

                    scope.store.connect(function(state) {
                        scope.posts = state.posts;
                        scope.syndication = state.syndication;
                    });

                    scope.goBack = function() {
                        scope.openPanel('ingest', null);
                    };

                    IncomingSyndicationActions
                        .getPosts(scope.blogId, scope.syndId);

                    IncomingSyndicationActions
                        .getSyndication(scope.syndId);

                    // On incoming post, we reload all the posts.
                    // Not very fast, but easy to setup
                    scope.$on('posts', function(e, data) {
                        if (data.posts[0].syndication_in)
                            IncomingSyndicationActions
                                .getPosts(scope.blogId, scope.syndId);
                    });

                    scope.$on('$destroy', scope.store.destroy);
                }
            };
        }]);
4
3
1
mixed
--- a/client/app/scripts/liveblog-syndication/directives/incoming-syndication.js +++ b/client/app/scripts/liveblog-syndication/directives/incoming-syndication.js @@ -36,5 +36,6 @@ // Not very fast, but easy to setup - scope.$on('posts', function() { - IncomingSyndicationActions - .getPosts(scope.blogId, scope.syndId); + scope.$on('posts', function(e, data) { + if (data.posts[0].syndication_in) + IncomingSyndicationActions + .getPosts(scope.blogId, scope.syndId); });
--- a/client/app/scripts/liveblog-syndication/directives/incoming-syndication.js +++ b/client/app/scripts/liveblog-syndication/directives/incoming-syndication.js @@ ... @@ // Not very fast, but easy to setup - scope.$on('posts', function() { - IncomingSyndicationActions - .getPosts(scope.blogId, scope.syndId); + scope.$on('posts', function(e, data) { + if (data.posts[0].syndication_in) + IncomingSyndicationActions + .getPosts(scope.blogId, scope.syndId); });
--- a/client/app/scripts/liveblog-syndication/directives/incoming-syndication.js +++ b/client/app/scripts/liveblog-syndication/directives/incoming-syndication.js @@ -36,5 +36,6 @@ CON // Not very fast, but easy to setup DEL scope.$on('posts', function() { DEL IncomingSyndicationActions DEL .getPosts(scope.blogId, scope.syndId); ADD scope.$on('posts', function(e, data) { ADD if (data.posts[0].syndication_in) ADD IncomingSyndicationActions ADD .getPosts(scope.blogId, scope.syndId); CON });
<<<<<<< SEARCH // On incoming post, we reload all the posts. // Not very fast, but easy to setup scope.$on('posts', function() { IncomingSyndicationActions .getPosts(scope.blogId, scope.syndId); }); ======= // On incoming post, we reload all the posts. // Not very fast, but easy to setup scope.$on('posts', function(e, data) { if (data.posts[0].syndication_in) IncomingSyndicationActions .getPosts(scope.blogId, scope.syndId); }); >>>>>>> REPLACE
paulfanelli/planet_alignment
1423085230a010b5a96ce9eaf63abeb9e5af58a4
setup.py
python
mit
Add a data_files option to install the config and plugin files in /etc/planet_alignment.
import os

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup


# Utility function to read the README file, used for the long desc.
def read(fn):
    return open(os.path.join(os.path.dirname(__file__), fn)).read()


setup(
    name='planet_alignment',
    version='1.0.0',
    packages=[
        'planet_alignment',
        'planet_alignment.app',
        'planet_alignment.cmd',
        'planet_alignment.config',
        'planet_alignment.data',
        'planet_alignment.mgr',
        'planet_alignment.plugins',
        'planet_alignment.test',
        'planet_alignment.utils'
    ],
    url='https://github.com/paulfanelli/planet_alignment.git',
    license='MIT',
    author='Paul Fanelli',
    author_email='[email protected]',
    description='Planet Alignment program',
    long_description=read('README'),
    install_requires=['bunch', 'zope.interface', 'PyYAML'],
    tests_require=['pytest'],
    entry_points={
        'console_scripts': [
            'planet_alignment = planet_alignment.__main__:main'
        ]
    },
    include_package_data=True
)
import os

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup


# Utility function to read the README file, used for the long desc.
def read(fn):
    return open(os.path.join(os.path.dirname(__file__), fn)).read()


setup(
    name='planet_alignment',
    version='1.0.0',
    packages=[
        'planet_alignment',
        'planet_alignment.app',
        'planet_alignment.cmd',
        'planet_alignment.config',
        'planet_alignment.data',
        'planet_alignment.mgr',
        'planet_alignment.plugins',
        'planet_alignment.test',
        'planet_alignment.utils'
    ],
    url='https://github.com/paulfanelli/planet_alignment.git',
    license='MIT',
    author='Paul Fanelli',
    author_email='[email protected]',
    description='Planet Alignment program',
    long_description=read('README'),
    install_requires=['bunch', 'zope.interface', 'PyYAML'],
    tests_require=['pytest'],
    entry_points={
        'console_scripts': [
            'planet_alignment = planet_alignment.__main__:main'
        ]
    },
    include_package_data=True,
    data_files=[
        ('/etc/planet_alignment', ['align1.py', 'align2.py', 'system.yaml'])
    ]
)
4
1
1
mixed
--- a/setup.py +++ b/setup.py @@ -40,3 +40,6 @@ }, - include_package_data=True + include_package_data=True, + data_files=[ + ('/etc/planet_alignment', ['align1.py', 'align2.py', 'system.yaml']) + ] )
--- a/setup.py +++ b/setup.py @@ ... @@ }, - include_package_data=True + include_package_data=True, + data_files=[ + ('/etc/planet_alignment', ['align1.py', 'align2.py', 'system.yaml']) + ] )
--- a/setup.py +++ b/setup.py @@ -40,3 +40,6 @@ CON }, DEL include_package_data=True ADD include_package_data=True, ADD data_files=[ ADD ('/etc/planet_alignment', ['align1.py', 'align2.py', 'system.yaml']) ADD ] CON )
<<<<<<< SEARCH ] }, include_package_data=True ) ======= ] }, include_package_data=True, data_files=[ ('/etc/planet_alignment', ['align1.py', 'align2.py', 'system.yaml']) ] ) >>>>>>> REPLACE
sourrust/flac
3223bba9bc100a5c48fa8fee3f7d5954553007ca
src/frame/subframe/types.rs
rust
bsd-3-clause
Add new initializer for `PartitionedRiceContents`
pub const MAX_FIXED_ORDER: usize = 4;

pub struct SubFrame {
  pub data: Data,
  pub wasted_bits: u32,
}

pub enum Data {
  Constant(i32),
  Verbatim(Vec<i32>),
  Fixed(Fixed),
}

pub struct Fixed {
  pub entropy_coding_method: EntropyCodingMethod,
  pub order: u8,
  pub warmup: [i32; MAX_FIXED_ORDER],
  pub residual: Vec<i32>,
}

pub struct EntropyCodingMethod {
  pub method_type: CodingMethod,
  pub data: PartitionedRice,
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum CodingMethod {
  PartitionedRice,
  PartitionedRice2,
}

pub struct PartitionedRice {
  pub order: u32,
  pub contents: PartitionedRiceContents,
}

pub struct PartitionedRiceContents {
  pub parameters: Vec<u32>,
  pub raw_bits: Vec<u32>,
  pub capacity_by_order: u32,
}
pub const MAX_FIXED_ORDER: usize = 4;

pub struct SubFrame {
  pub data: Data,
  pub wasted_bits: u32,
}

pub enum Data {
  Constant(i32),
  Verbatim(Vec<i32>),
  Fixed(Fixed),
}

pub struct Fixed {
  pub entropy_coding_method: EntropyCodingMethod,
  pub order: u8,
  pub warmup: [i32; MAX_FIXED_ORDER],
  pub residual: Vec<i32>,
}

pub struct EntropyCodingMethod {
  pub method_type: CodingMethod,
  pub data: PartitionedRice,
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum CodingMethod {
  PartitionedRice,
  PartitionedRice2,
}

pub struct PartitionedRice {
  pub order: u32,
  pub contents: PartitionedRiceContents,
}

pub struct PartitionedRiceContents {
  pub parameters: Vec<u32>,
  pub raw_bits: Vec<u32>,
  pub capacity_by_order: u32,
}

impl PartitionedRiceContents {
  pub fn new(capacity_by_order: u32) -> PartitionedRiceContents {
    let capacity = 2_usize.pow(capacity_by_order);
    let mut parameters = Vec::with_capacity(capacity);
    let mut raw_bits = Vec::with_capacity(capacity);

    unsafe {
      parameters.set_len(capacity);
      raw_bits.set_len(capacity);
    }

    PartitionedRiceContents {
      parameters: parameters,
      raw_bits: raw_bits,
      capacity_by_order: capacity_by_order,
    }
  }
}
19
0
1
add_only
--- a/src/frame/subframe/types.rs +++ b/src/frame/subframe/types.rs @@ -41 +41,20 @@ } + +impl PartitionedRiceContents { + pub fn new(capacity_by_order: u32) -> PartitionedRiceContents { + let capacity = 2_usize.pow(capacity_by_order); + let mut parameters = Vec::with_capacity(capacity); + let mut raw_bits = Vec::with_capacity(capacity); + + unsafe { + parameters.set_len(capacity); + raw_bits.set_len(capacity); + } + + PartitionedRiceContents { + parameters: parameters, + raw_bits: raw_bits, + capacity_by_order: capacity_by_order, + } + } +}
--- a/src/frame/subframe/types.rs +++ b/src/frame/subframe/types.rs @@ ... @@ } + +impl PartitionedRiceContents { + pub fn new(capacity_by_order: u32) -> PartitionedRiceContents { + let capacity = 2_usize.pow(capacity_by_order); + let mut parameters = Vec::with_capacity(capacity); + let mut raw_bits = Vec::with_capacity(capacity); + + unsafe { + parameters.set_len(capacity); + raw_bits.set_len(capacity); + } + + PartitionedRiceContents { + parameters: parameters, + raw_bits: raw_bits, + capacity_by_order: capacity_by_order, + } + } +}
--- a/src/frame/subframe/types.rs +++ b/src/frame/subframe/types.rs @@ -41 +41,20 @@ CON } ADD ADD impl PartitionedRiceContents { ADD pub fn new(capacity_by_order: u32) -> PartitionedRiceContents { ADD let capacity = 2_usize.pow(capacity_by_order); ADD let mut parameters = Vec::with_capacity(capacity); ADD let mut raw_bits = Vec::with_capacity(capacity); ADD ADD unsafe { ADD parameters.set_len(capacity); ADD raw_bits.set_len(capacity); ADD } ADD ADD PartitionedRiceContents { ADD parameters: parameters, ADD raw_bits: raw_bits, ADD capacity_by_order: capacity_by_order, ADD } ADD } ADD }
<<<<<<< SEARCH pub capacity_by_order: u32, } ======= pub capacity_by_order: u32, } impl PartitionedRiceContents { pub fn new(capacity_by_order: u32) -> PartitionedRiceContents { let capacity = 2_usize.pow(capacity_by_order); let mut parameters = Vec::with_capacity(capacity); let mut raw_bits = Vec::with_capacity(capacity); unsafe { parameters.set_len(capacity); raw_bits.set_len(capacity); } PartitionedRiceContents { parameters: parameters, raw_bits: raw_bits, capacity_by_order: capacity_by_order, } } } >>>>>>> REPLACE
MarkusAmshove/Kluent
be463c6823c2fc26e8189b513674ba181454750a
jvm/src/test/kotlin/org/amshove/kluent/tests/mocking/VerifyCalledOnceShould.kt
kotlin
mit
Add unit tests for times verification mode
package org.amshove.kluent.tests.mocking

import org.amshove.kluent.*
import org.amshove.kluent.tests.helpclasses.Database
import kotlin.test.Test
import kotlin.test.assertFails

class VerifyCalledOnceShould {

    @Test
    fun passWhenAMethodWasCalledOnlyOnce() {
        val mock = mock(Database::class)
        mock.getPerson(1)
        mock.getPerson(1)
        Verify times 2 on mock that mock.getPerson(1) was called
        assertFails { Verify on mock that mock.getPerson(1) was called }
    }

    @Test
    fun failWhenAMethodWasNotCalled() {
        val mock = mock(Database::class)
        mock.getPerson(1)
        Verify on mock that mock.getPerson(1) was called
        assertFails { Verify times 1 on mock that mock.getPerson(5) was called }
    }

    @Test
    fun failWhenTimesThatAMethodWasCalledDoesNotMatch() {
        val mock = mock(Database::class)
        mock.getPerson(1)
        Verify on mock that mock.getPerson(1) was called
        assertFails { Verify times 2 on mock that mock.getPerson(1) was called }
    }
}
package org.amshove.kluent.tests.mocking

import org.amshove.kluent.*
import org.amshove.kluent.tests.helpclasses.Database
import kotlin.test.Test
import kotlin.test.assertFails

class VerifyUsingTimesShould {

    @Test
    fun passWhenAMethodWasCalledSpecifiedTimes() {
        val mock = mock(Database::class)
        mock.getPerson(1)
        mock.getPerson(5)
        Verify on mock that mock.getPerson(1) was called
        Verify on mock that mock.getPerson(5) was called
        Verify times 2 on mock that mock.getPerson(any()) was called
    }

    @Test
    fun failWhenAMethodWasNotCalled() {
        val mock = mock(Database::class)
        mock.getPerson(1)
        Verify on mock that mock.getPerson(1) was called
        assertFails { Verify times 1 on mock that mock.getPerson(5) was called }
    }

    @Test
    fun failWhenAMethodWasCalledLessThanSpecified() {
        val mock = mock(Database::class)
        mock.getPerson(1)
        Verify on mock that mock.getPerson(1) was called
        assertFails { Verify times 2 on mock that mock.getPerson(any()) was called }
    }

    @Test
    fun failWhenAMethodWasCalledMoreThanSpecified() {
        val mock = mock(Database::class)
        mock.getPerson(1)
        mock.getPerson(5)
        Verify on mock that mock.getPerson(1) was called
        Verify on mock that mock.getPerson(5) was called
        assertFails { Verify times 1 on mock that mock.getPerson(any()) was called }
    }
}
18
7
3
mixed
--- a/jvm/src/test/kotlin/org/amshove/kluent/tests/mocking/VerifyCalledOnceShould.kt +++ b/jvm/src/test/kotlin/org/amshove/kluent/tests/mocking/VerifyCalledOnceShould.kt @@ -7,11 +7,12 @@ -class VerifyCalledOnceShould { +class VerifyUsingTimesShould { @Test - fun passWhenAMethodWasCalledOnlyOnce() { + fun passWhenAMethodWasCalledSpecifiedTimes() { val mock = mock(Database::class) mock.getPerson(1) - mock.getPerson(1) - Verify times 2 on mock that mock.getPerson(1) was called - assertFails { Verify on mock that mock.getPerson(1) was called } + mock.getPerson(5) + Verify on mock that mock.getPerson(1) was called + Verify on mock that mock.getPerson(5) was called + Verify times 2 on mock that mock.getPerson(any()) was called } @@ -27,3 +28,3 @@ @Test - fun failWhenTimesThatAMethodWasCalledDoesNotMatch() { + fun failWhenAMethodWasCalledLessThanSpecified() { val mock = mock(Database::class) @@ -31,3 +32,13 @@ Verify on mock that mock.getPerson(1) was called - assertFails { Verify times 2 on mock that mock.getPerson(1) was called } + assertFails { Verify times 2 on mock that mock.getPerson(any()) was called } + } + + @Test + fun failWhenAMethodWasCalledMoreThanSpecified() { + val mock = mock(Database::class) + mock.getPerson(1) + mock.getPerson(5) + Verify on mock that mock.getPerson(1) was called + Verify on mock that mock.getPerson(5) was called + assertFails { Verify times 1 on mock that mock.getPerson(any()) was called } }
--- a/jvm/src/test/kotlin/org/amshove/kluent/tests/mocking/VerifyCalledOnceShould.kt +++ b/jvm/src/test/kotlin/org/amshove/kluent/tests/mocking/VerifyCalledOnceShould.kt @@ ... @@ -class VerifyCalledOnceShould { +class VerifyUsingTimesShould { @Test - fun passWhenAMethodWasCalledOnlyOnce() { + fun passWhenAMethodWasCalledSpecifiedTimes() { val mock = mock(Database::class) mock.getPerson(1) - mock.getPerson(1) - Verify times 2 on mock that mock.getPerson(1) was called - assertFails { Verify on mock that mock.getPerson(1) was called } + mock.getPerson(5) + Verify on mock that mock.getPerson(1) was called + Verify on mock that mock.getPerson(5) was called + Verify times 2 on mock that mock.getPerson(any()) was called } @@ ... @@ @Test - fun failWhenTimesThatAMethodWasCalledDoesNotMatch() { + fun failWhenAMethodWasCalledLessThanSpecified() { val mock = mock(Database::class) @@ ... @@ Verify on mock that mock.getPerson(1) was called - assertFails { Verify times 2 on mock that mock.getPerson(1) was called } + assertFails { Verify times 2 on mock that mock.getPerson(any()) was called } + } + + @Test + fun failWhenAMethodWasCalledMoreThanSpecified() { + val mock = mock(Database::class) + mock.getPerson(1) + mock.getPerson(5) + Verify on mock that mock.getPerson(1) was called + Verify on mock that mock.getPerson(5) was called + assertFails { Verify times 1 on mock that mock.getPerson(any()) was called } }
--- a/jvm/src/test/kotlin/org/amshove/kluent/tests/mocking/VerifyCalledOnceShould.kt +++ b/jvm/src/test/kotlin/org/amshove/kluent/tests/mocking/VerifyCalledOnceShould.kt @@ -7,11 +7,12 @@ CON DEL class VerifyCalledOnceShould { ADD class VerifyUsingTimesShould { CON CON @Test DEL fun passWhenAMethodWasCalledOnlyOnce() { ADD fun passWhenAMethodWasCalledSpecifiedTimes() { CON val mock = mock(Database::class) CON mock.getPerson(1) DEL mock.getPerson(1) DEL Verify times 2 on mock that mock.getPerson(1) was called DEL assertFails { Verify on mock that mock.getPerson(1) was called } ADD mock.getPerson(5) ADD Verify on mock that mock.getPerson(1) was called ADD Verify on mock that mock.getPerson(5) was called ADD Verify times 2 on mock that mock.getPerson(any()) was called CON } @@ -27,3 +28,3 @@ CON @Test DEL fun failWhenTimesThatAMethodWasCalledDoesNotMatch() { ADD fun failWhenAMethodWasCalledLessThanSpecified() { CON val mock = mock(Database::class) @@ -31,3 +32,13 @@ CON Verify on mock that mock.getPerson(1) was called DEL assertFails { Verify times 2 on mock that mock.getPerson(1) was called } ADD assertFails { Verify times 2 on mock that mock.getPerson(any()) was called } ADD } ADD ADD @Test ADD fun failWhenAMethodWasCalledMoreThanSpecified() { ADD val mock = mock(Database::class) ADD mock.getPerson(1) ADD mock.getPerson(5) ADD Verify on mock that mock.getPerson(1) was called ADD Verify on mock that mock.getPerson(5) was called ADD assertFails { Verify times 1 on mock that mock.getPerson(any()) was called } CON }
<<<<<<< SEARCH import kotlin.test.assertFails class VerifyCalledOnceShould { @Test fun passWhenAMethodWasCalledOnlyOnce() { val mock = mock(Database::class) mock.getPerson(1) mock.getPerson(1) Verify times 2 on mock that mock.getPerson(1) was called assertFails { Verify on mock that mock.getPerson(1) was called } } ======= import kotlin.test.assertFails class VerifyUsingTimesShould { @Test fun passWhenAMethodWasCalledSpecifiedTimes() { val mock = mock(Database::class) mock.getPerson(1) mock.getPerson(5) Verify on mock that mock.getPerson(1) was called Verify on mock that mock.getPerson(5) was called Verify times 2 on mock that mock.getPerson(any()) was called } >>>>>>> REPLACE <<<<<<< SEARCH @Test fun failWhenTimesThatAMethodWasCalledDoesNotMatch() { val mock = mock(Database::class) mock.getPerson(1) Verify on mock that mock.getPerson(1) was called assertFails { Verify times 2 on mock that mock.getPerson(1) was called } } } ======= @Test fun failWhenAMethodWasCalledLessThanSpecified() { val mock = mock(Database::class) mock.getPerson(1) Verify on mock that mock.getPerson(1) was called assertFails { Verify times 2 on mock that mock.getPerson(any()) was called } } @Test fun failWhenAMethodWasCalledMoreThanSpecified() { val mock = mock(Database::class) mock.getPerson(1) mock.getPerson(5) Verify on mock that mock.getPerson(1) was called Verify on mock that mock.getPerson(5) was called assertFails { Verify times 1 on mock that mock.getPerson(any()) was called } } } >>>>>>> REPLACE
piemonkey/TodoTree-prototype
a74a5a59f08ae21b9f8aa5ecd5e2c1914c5e3eaf
app/src/main/java/com/placeholder/rich/todotreeprototype/model/Item.java
java
mit
Add number of sub items to model
package com.placeholder.rich.todotreeprototype.model;

public class Item {
    private final String name;
    private boolean complete;

    public Item(String name) {
        this(name, false);
    }

    public Item(String name, boolean complete) {
        this.name = name;
        this.complete = complete;
    }

    public String getName() {
        return name;
    }

    public boolean isComplete() {
        return complete;
    }

    public boolean toggleComplete() {
        return complete = !complete;
    }

}
package com.placeholder.rich.todotreeprototype.model;

public class Item {
    private final String name;
    private boolean complete;
    private int nSubItems;
    private int nItemsLeft;

    public Item(String name) {
        this(name, false, 0, 0);
    }

    public Item(String name, boolean complete) {
        this(name, complete, 0, 0);
    }

    public Item(String name, boolean complete, int nSubItems, int nItemsLeft) {
        this.name = name;
        this.complete = complete;
        this.nSubItems = nSubItems;
        this.nItemsLeft = nItemsLeft;
    }

    public String getName() {
        return name;
    }

    public boolean isComplete() {
        return complete;
    }

    public boolean toggleComplete() {
        return complete = !complete;
    }

    public int getNSubItems() {
        return nSubItems;
    }

    public int getNItemsLeft() {
        return nItemsLeft;
    }

    public boolean hasSubItems() {
        return nSubItems > 0;
    }

}
21
1
3
mixed
--- a/app/src/main/java/com/placeholder/rich/todotreeprototype/model/Item.java +++ b/app/src/main/java/com/placeholder/rich/todotreeprototype/model/Item.java @@ -5,5 +5,7 @@ private boolean complete; + private int nSubItems; + private int nItemsLeft; public Item(String name) { - this(name, false); + this(name, false, 0, 0); } @@ -11,4 +13,10 @@ public Item(String name, boolean complete) { + this(name, complete, 0, 0); + } + + public Item(String name, boolean complete, int nSubItems, int nItemsLeft) { this.name = name; this.complete = complete; + this.nSubItems = nSubItems; + this.nItemsLeft = nItemsLeft; } @@ -27,2 +35,14 @@ + public int getNSubItems() { + return nSubItems; + } + + public int getNItemsLeft() { + return nItemsLeft; + } + + public boolean hasSubItems() { + return nSubItems > 0; + } + }
--- a/app/src/main/java/com/placeholder/rich/todotreeprototype/model/Item.java +++ b/app/src/main/java/com/placeholder/rich/todotreeprototype/model/Item.java @@ ... @@ private boolean complete; + private int nSubItems; + private int nItemsLeft; public Item(String name) { - this(name, false); + this(name, false, 0, 0); } @@ ... @@ public Item(String name, boolean complete) { + this(name, complete, 0, 0); + } + + public Item(String name, boolean complete, int nSubItems, int nItemsLeft) { this.name = name; this.complete = complete; + this.nSubItems = nSubItems; + this.nItemsLeft = nItemsLeft; } @@ ... @@ + public int getNSubItems() { + return nSubItems; + } + + public int getNItemsLeft() { + return nItemsLeft; + } + + public boolean hasSubItems() { + return nSubItems > 0; + } + }
--- a/app/src/main/java/com/placeholder/rich/todotreeprototype/model/Item.java +++ b/app/src/main/java/com/placeholder/rich/todotreeprototype/model/Item.java @@ -5,5 +5,7 @@ CON private boolean complete; ADD private int nSubItems; ADD private int nItemsLeft; CON CON public Item(String name) { DEL this(name, false); ADD this(name, false, 0, 0); CON } @@ -11,4 +13,10 @@ CON public Item(String name, boolean complete) { ADD this(name, complete, 0, 0); ADD } ADD ADD public Item(String name, boolean complete, int nSubItems, int nItemsLeft) { CON this.name = name; CON this.complete = complete; ADD this.nSubItems = nSubItems; ADD this.nItemsLeft = nItemsLeft; CON } @@ -27,2 +35,14 @@ CON ADD public int getNSubItems() { ADD return nSubItems; ADD } ADD ADD public int getNItemsLeft() { ADD return nItemsLeft; ADD } ADD ADD public boolean hasSubItems() { ADD return nSubItems > 0; ADD } ADD CON }
<<<<<<< SEARCH private final String name; private boolean complete; public Item(String name) { this(name, false); } public Item(String name, boolean complete) { this.name = name; this.complete = complete; } ======= private final String name; private boolean complete; private int nSubItems; private int nItemsLeft; public Item(String name) { this(name, false, 0, 0); } public Item(String name, boolean complete) { this(name, complete, 0, 0); } public Item(String name, boolean complete, int nSubItems, int nItemsLeft) { this.name = name; this.complete = complete; this.nSubItems = nSubItems; this.nItemsLeft = nItemsLeft; } >>>>>>> REPLACE <<<<<<< SEARCH } } ======= } public int getNSubItems() { return nSubItems; } public int getNItemsLeft() { return nItemsLeft; } public boolean hasSubItems() { return nSubItems > 0; } } >>>>>>> REPLACE
nickvandewiele/RMG-Py
98ca37ed174e281542df2f1026a298387845b524
rmgpy/tools/data/generate/input.py
python
mit
Cut down on the loading of families in the normal GenerateReactionsTest Change generateReactions input reactant to propyl
# Data sources for kinetics database( thermoLibraries = ['primaryThermoLibrary'], reactionLibraries = [], seedMechanisms = [], kineticsDepositories = 'default', #this section lists possible reaction families to find reactioons with kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'], kineticsEstimator = 'rate rules', ) # List all species you want reactions between species( label='ethane', reactive=True, structure=SMILES("CC"), ) species( label='H', reactive=True, structure=SMILES("[H]"), ) species( label='butane', reactive=True, structure=SMILES("CCCC"), ) # you must list reactor conditions (though this may not effect the output) simpleReactor( temperature=(650,'K'), pressure=(10.0,'bar'), initialMoleFractions={ "ethane": 1, }, terminationConversion={ 'butane': .99, }, terminationTime=(40,'s'), )
# Data sources for kinetics database( thermoLibraries = ['primaryThermoLibrary'], reactionLibraries = [], seedMechanisms = [], kineticsDepositories = 'default', #this section lists possible reaction families to find reactioons with kineticsFamilies = ['R_Recombination'], kineticsEstimator = 'rate rules', ) # List all species you want reactions between species( label='Propyl', reactive=True, structure=SMILES("CC[CH3]"), ) species( label='H', reactive=True, structure=SMILES("[H]"), ) # you must list reactor conditions (though this may not effect the output) simpleReactor( temperature=(650,'K'), pressure=(10.0,'bar'), initialMoleFractions={ "Propyl": 1, }, terminationConversion={ 'Propyl': .99, }, terminationTime=(40,'s'), )
5
11
4
mixed
--- a/rmgpy/tools/data/generate/input.py +++ b/rmgpy/tools/data/generate/input.py @@ -7,3 +7,3 @@ #this section lists possible reaction families to find reactioons with - kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'], + kineticsFamilies = ['R_Recombination'], kineticsEstimator = 'rate rules', @@ -13,5 +13,5 @@ species( - label='ethane', + label='Propyl', reactive=True, - structure=SMILES("CC"), + structure=SMILES("CC[CH3]"), ) @@ -24,8 +24,2 @@ -species( - label='butane', - reactive=True, - structure=SMILES("CCCC"), -) - @@ -36,6 +30,6 @@ initialMoleFractions={ - "ethane": 1, + "Propyl": 1, }, terminationConversion={ - 'butane': .99, + 'Propyl': .99, },
--- a/rmgpy/tools/data/generate/input.py +++ b/rmgpy/tools/data/generate/input.py @@ ... @@ #this section lists possible reaction families to find reactioons with - kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'], + kineticsFamilies = ['R_Recombination'], kineticsEstimator = 'rate rules', @@ ... @@ species( - label='ethane', + label='Propyl', reactive=True, - structure=SMILES("CC"), + structure=SMILES("CC[CH3]"), ) @@ ... @@ -species( - label='butane', - reactive=True, - structure=SMILES("CCCC"), -) - @@ ... @@ initialMoleFractions={ - "ethane": 1, + "Propyl": 1, }, terminationConversion={ - 'butane': .99, + 'Propyl': .99, },
--- a/rmgpy/tools/data/generate/input.py +++ b/rmgpy/tools/data/generate/input.py @@ -7,3 +7,3 @@ CON #this section lists possible reaction families to find reactioons with DEL kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'], ADD kineticsFamilies = ['R_Recombination'], CON kineticsEstimator = 'rate rules', @@ -13,5 +13,5 @@ CON species( DEL label='ethane', ADD label='Propyl', CON reactive=True, DEL structure=SMILES("CC"), ADD structure=SMILES("CC[CH3]"), CON ) @@ -24,8 +24,2 @@ CON DEL species( DEL label='butane', DEL reactive=True, DEL structure=SMILES("CCCC"), DEL ) DEL CON @@ -36,6 +30,6 @@ CON initialMoleFractions={ DEL "ethane": 1, ADD "Propyl": 1, CON }, CON terminationConversion={ DEL 'butane': .99, ADD 'Propyl': .99, CON },
<<<<<<< SEARCH kineticsDepositories = 'default', #this section lists possible reaction families to find reactioons with kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'], kineticsEstimator = 'rate rules', ) # List all species you want reactions between species( label='ethane', reactive=True, structure=SMILES("CC"), ) ======= kineticsDepositories = 'default', #this section lists possible reaction families to find reactioons with kineticsFamilies = ['R_Recombination'], kineticsEstimator = 'rate rules', ) # List all species you want reactions between species( label='Propyl', reactive=True, structure=SMILES("CC[CH3]"), ) >>>>>>> REPLACE <<<<<<< SEARCH ) species( label='butane', reactive=True, structure=SMILES("CCCC"), ) # you must list reactor conditions (though this may not effect the output) ======= ) # you must list reactor conditions (though this may not effect the output) >>>>>>> REPLACE <<<<<<< SEARCH pressure=(10.0,'bar'), initialMoleFractions={ "ethane": 1, }, terminationConversion={ 'butane': .99, }, terminationTime=(40,'s'), ) ======= pressure=(10.0,'bar'), initialMoleFractions={ "Propyl": 1, }, terminationConversion={ 'Propyl': .99, }, terminationTime=(40,'s'), ) >>>>>>> REPLACE
release-notes/release-notes-hub
82853862625336f68ec127b25a33c32b6869d2cb
controllers/IndexController.js
javascript
mit
Load the latest 6 release notes to the start page.
'use strict'; const AbstractController = require('./AbstractController'); const ReleaseNotesLoader = require('@release-notes/node/lib/ReleaseNotesLoader'); const multer = require('multer'); const releaseNotesLoader = new ReleaseNotesLoader(); const uploadHandler = multer(); class IndexController extends AbstractController { indexAction(req, res, next) { res.render('index/index', {}); } showReleaseNotesAction(req, res, next) { if (!req.file) { return void res.render('index/index', { err: new Error('No release-notes.yml file was uploaded.') }); } releaseNotesLoader.loadReleaseNotes(req.file.buffer, (err, releaseNotes) => { if (err) { res.statusCode = 400; } res.render('index/index', { err, releaseNotes, }) }); } getRoutes() { return { '/': [{ handler: (req, res, next) => this.indexAction(req, res, next) }, { method: 'post', handler: [ uploadHandler.single('release-notes'), (req, res, next) => this.showReleaseNotesAction(req, res, next), ] }], } }; } module.exports = IndexController;
'use strict'; const AbstractController = require('./AbstractController'); class IndexController extends AbstractController { indexAction(req, res, next) { this.serviceManager.get('releaseNotesRepository').findNewest( 6, (err, releaseNotesList) => { if (err) { return void next(err); } res.render('index/index', { newest: releaseNotesList, }); } ); } getRoutes() { return { '/': [{ handler: (req, res, next) => this.indexAction(req, res, next) }], } }; } module.exports = IndexController;
9
29
3
mixed
--- a/controllers/IndexController.js +++ b/controllers/IndexController.js @@ -3,7 +3,2 @@ const AbstractController = require('./AbstractController'); -const ReleaseNotesLoader = require('@release-notes/node/lib/ReleaseNotesLoader'); -const multer = require('multer'); - -const releaseNotesLoader = new ReleaseNotesLoader(); -const uploadHandler = multer(); @@ -11,22 +6,13 @@ indexAction(req, res, next) { - res.render('index/index', {}); - } + this.serviceManager.get('releaseNotesRepository').findNewest( + 6, (err, releaseNotesList) => { + if (err) { + return void next(err); + } - showReleaseNotesAction(req, res, next) { - if (!req.file) { - return void res.render('index/index', { - err: new Error('No release-notes.yml file was uploaded.') - }); - } - - releaseNotesLoader.loadReleaseNotes(req.file.buffer, (err, releaseNotes) => { - if (err) { - res.statusCode = 400; + res.render('index/index', { + newest: releaseNotesList, + }); } - - res.render('index/index', { - err, - releaseNotes, - }) - }); + ); } @@ -37,8 +23,2 @@ handler: (req, res, next) => this.indexAction(req, res, next) - }, { - method: 'post', - handler: [ - uploadHandler.single('release-notes'), - (req, res, next) => this.showReleaseNotesAction(req, res, next), - ] }],
--- a/controllers/IndexController.js +++ b/controllers/IndexController.js @@ ... @@ const AbstractController = require('./AbstractController'); -const ReleaseNotesLoader = require('@release-notes/node/lib/ReleaseNotesLoader'); -const multer = require('multer'); - -const releaseNotesLoader = new ReleaseNotesLoader(); -const uploadHandler = multer(); @@ ... @@ indexAction(req, res, next) { - res.render('index/index', {}); - } + this.serviceManager.get('releaseNotesRepository').findNewest( + 6, (err, releaseNotesList) => { + if (err) { + return void next(err); + } - showReleaseNotesAction(req, res, next) { - if (!req.file) { - return void res.render('index/index', { - err: new Error('No release-notes.yml file was uploaded.') - }); - } - - releaseNotesLoader.loadReleaseNotes(req.file.buffer, (err, releaseNotes) => { - if (err) { - res.statusCode = 400; + res.render('index/index', { + newest: releaseNotesList, + }); } - - res.render('index/index', { - err, - releaseNotes, - }) - }); + ); } @@ ... @@ handler: (req, res, next) => this.indexAction(req, res, next) - }, { - method: 'post', - handler: [ - uploadHandler.single('release-notes'), - (req, res, next) => this.showReleaseNotesAction(req, res, next), - ] }],
--- a/controllers/IndexController.js +++ b/controllers/IndexController.js @@ -3,7 +3,2 @@ CON const AbstractController = require('./AbstractController'); DEL const ReleaseNotesLoader = require('@release-notes/node/lib/ReleaseNotesLoader'); DEL const multer = require('multer'); DEL DEL const releaseNotesLoader = new ReleaseNotesLoader(); DEL const uploadHandler = multer(); CON @@ -11,22 +6,13 @@ CON indexAction(req, res, next) { DEL res.render('index/index', {}); DEL } ADD this.serviceManager.get('releaseNotesRepository').findNewest( ADD 6, (err, releaseNotesList) => { ADD if (err) { ADD return void next(err); ADD } CON DEL showReleaseNotesAction(req, res, next) { DEL if (!req.file) { DEL return void res.render('index/index', { DEL err: new Error('No release-notes.yml file was uploaded.') DEL }); DEL } DEL DEL releaseNotesLoader.loadReleaseNotes(req.file.buffer, (err, releaseNotes) => { DEL if (err) { DEL res.statusCode = 400; ADD res.render('index/index', { ADD newest: releaseNotesList, ADD }); CON } DEL DEL res.render('index/index', { DEL err, DEL releaseNotes, DEL }) DEL }); ADD ); CON } @@ -37,8 +23,2 @@ CON handler: (req, res, next) => this.indexAction(req, res, next) DEL }, { DEL method: 'post', DEL handler: [ DEL uploadHandler.single('release-notes'), DEL (req, res, next) => this.showReleaseNotesAction(req, res, next), DEL ] CON }],
<<<<<<< SEARCH const AbstractController = require('./AbstractController'); const ReleaseNotesLoader = require('@release-notes/node/lib/ReleaseNotesLoader'); const multer = require('multer'); const releaseNotesLoader = new ReleaseNotesLoader(); const uploadHandler = multer(); class IndexController extends AbstractController { indexAction(req, res, next) { res.render('index/index', {}); } showReleaseNotesAction(req, res, next) { if (!req.file) { return void res.render('index/index', { err: new Error('No release-notes.yml file was uploaded.') }); } releaseNotesLoader.loadReleaseNotes(req.file.buffer, (err, releaseNotes) => { if (err) { res.statusCode = 400; } res.render('index/index', { err, releaseNotes, }) }); } ======= const AbstractController = require('./AbstractController'); class IndexController extends AbstractController { indexAction(req, res, next) { this.serviceManager.get('releaseNotesRepository').findNewest( 6, (err, releaseNotesList) => { if (err) { return void next(err); } res.render('index/index', { newest: releaseNotesList, }); } ); } >>>>>>> REPLACE <<<<<<< SEARCH '/': [{ handler: (req, res, next) => this.indexAction(req, res, next) }, { method: 'post', handler: [ uploadHandler.single('release-notes'), (req, res, next) => this.showReleaseNotesAction(req, res, next), ] }], } ======= '/': [{ handler: (req, res, next) => this.indexAction(req, res, next) }], } >>>>>>> REPLACE
Madsn/ripple-client
6f4a3ca3df610de6fdbd8a2a148641f57c0eee12
config-example.js
javascript
isc
Set default connection_offset to 0 seconds
/** * Ripple Client Configuration * * Copy this file to config.js and edit to suit your preferences. */ var Options = { server: { trace : true, trusted: true, local_signing: true, servers: [ { host: 's_west.ripple.com', port: 443, secure: true }, { host: 's_east.ripple.com', port: 443, secure: true } ], connection_offset: 1 }, blobvault : 'https://blobvault.payward.com', // If set, login will persist across sessions (page reload). This is mostly // intended for developers, be careful about using this in a real setting. persistent_auth : false, // Number of transactions each page has in balance tab notifications transactions_per_page: 50, // Configure bridges bridge: { out: { // 'bitcoin': 'localhost:3000' // 'bitcoin': 'https://www.bitstamp.net/ripple/bridge/out/bitcoin/' } } }; // Load client-side overrides if (store.enabled) { $.extend(true, Options, JSON.parse(store.get('ripple_settings') || '{}')); }
/** * Ripple Client Configuration * * Copy this file to config.js and edit to suit your preferences. */ var Options = { server: { trace : true, trusted: true, local_signing: true, servers: [ { host: 's_west.ripple.com', port: 443, secure: true }, { host: 's_east.ripple.com', port: 443, secure: true } ], connection_offset: 0 }, blobvault : 'https://blobvault.payward.com', // If set, login will persist across sessions (page reload). This is mostly // intended for developers, be careful about using this in a real setting. persistent_auth : false, // Number of transactions each page has in balance tab notifications transactions_per_page: 50, // Configure bridges bridge: { out: { // 'bitcoin': 'localhost:3000' // 'bitcoin': 'https://www.bitstamp.net/ripple/bridge/out/bitcoin/' } } }; // Load client-side overrides if (store.enabled) { $.extend(true, Options, JSON.parse(store.get('ripple_settings') || '{}')); }
1
1
1
mixed
--- a/config-example.js +++ b/config-example.js @@ -16,3 +16,3 @@ - connection_offset: 1 + connection_offset: 0 },
--- a/config-example.js +++ b/config-example.js @@ ... @@ - connection_offset: 1 + connection_offset: 0 },
--- a/config-example.js +++ b/config-example.js @@ -16,3 +16,3 @@ CON DEL connection_offset: 1 ADD connection_offset: 0 CON },
<<<<<<< SEARCH ], connection_offset: 1 }, ======= ], connection_offset: 0 }, >>>>>>> REPLACE
group-policy/rally
d8d2e4b763fbd7cedc42046f6f45395bf15caa79
samples/plugins/scenario/scenario_plugin.py
python
apache-2.0
Fix the scenario plugin sample We forgot to fix scenario plugin sample when we were doing rally.task.scenario refactoring Change-Id: Iadbb960cf168bd3b9cd6c1881a5f7a8dffd7036f
# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.task.scenarios import base class ScenarioPlugin(base.Scenario): """Sample plugin which lists flavors.""" @base.atomic_action_timer("list_flavors") def _list_flavors(self): """Sample of usage clients - list flavors You can use self.context, self.admin_clients and self.clients which are initialized on scenario instance creation. """ self.clients("nova").flavors.list() @base.atomic_action_timer("list_flavors_as_admin") def _list_flavors_as_admin(self): """The same with admin clients.""" self.admin_clients("nova").flavors.list() @base.scenario() def list_flavors(self): """List flavors.""" self._list_flavors() self._list_flavors_as_admin()
# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.plugins.openstack import scenario from rally.task import atomic class ScenarioPlugin(scenario.OpenStackScenario): """Sample plugin which lists flavors.""" @atomic.action_timer("list_flavors") def _list_flavors(self): """Sample of usage clients - list flavors You can use self.context, self.admin_clients and self.clients which are initialized on scenario instance creation. """ self.clients("nova").flavors.list() @atomic.action_timer("list_flavors_as_admin") def _list_flavors_as_admin(self): """The same with admin clients.""" self.admin_clients("nova").flavors.list() @scenario.configure() def list_flavors(self): """List flavors.""" self._list_flavors() self._list_flavors_as_admin()
6
5
3
mixed
--- a/samples/plugins/scenario/scenario_plugin.py +++ b/samples/plugins/scenario/scenario_plugin.py @@ -15,9 +15,10 @@ -from rally.task.scenarios import base +from rally.plugins.openstack import scenario +from rally.task import atomic -class ScenarioPlugin(base.Scenario): +class ScenarioPlugin(scenario.OpenStackScenario): """Sample plugin which lists flavors.""" - @base.atomic_action_timer("list_flavors") + @atomic.action_timer("list_flavors") def _list_flavors(self): @@ -30,3 +31,3 @@ - @base.atomic_action_timer("list_flavors_as_admin") + @atomic.action_timer("list_flavors_as_admin") def _list_flavors_as_admin(self): @@ -35,3 +36,3 @@ - @base.scenario() + @scenario.configure() def list_flavors(self):
--- a/samples/plugins/scenario/scenario_plugin.py +++ b/samples/plugins/scenario/scenario_plugin.py @@ ... @@ -from rally.task.scenarios import base +from rally.plugins.openstack import scenario +from rally.task import atomic -class ScenarioPlugin(base.Scenario): +class ScenarioPlugin(scenario.OpenStackScenario): """Sample plugin which lists flavors.""" - @base.atomic_action_timer("list_flavors") + @atomic.action_timer("list_flavors") def _list_flavors(self): @@ ... @@ - @base.atomic_action_timer("list_flavors_as_admin") + @atomic.action_timer("list_flavors_as_admin") def _list_flavors_as_admin(self): @@ ... @@ - @base.scenario() + @scenario.configure() def list_flavors(self):
--- a/samples/plugins/scenario/scenario_plugin.py +++ b/samples/plugins/scenario/scenario_plugin.py @@ -15,9 +15,10 @@ CON DEL from rally.task.scenarios import base ADD from rally.plugins.openstack import scenario ADD from rally.task import atomic CON CON DEL class ScenarioPlugin(base.Scenario): ADD class ScenarioPlugin(scenario.OpenStackScenario): CON """Sample plugin which lists flavors.""" CON DEL @base.atomic_action_timer("list_flavors") ADD @atomic.action_timer("list_flavors") CON def _list_flavors(self): @@ -30,3 +31,3 @@ CON DEL @base.atomic_action_timer("list_flavors_as_admin") ADD @atomic.action_timer("list_flavors_as_admin") CON def _list_flavors_as_admin(self): @@ -35,3 +36,3 @@ CON DEL @base.scenario() ADD @scenario.configure() CON def list_flavors(self):
<<<<<<< SEARCH # under the License. from rally.task.scenarios import base class ScenarioPlugin(base.Scenario): """Sample plugin which lists flavors.""" @base.atomic_action_timer("list_flavors") def _list_flavors(self): """Sample of usage clients - list flavors ======= # under the License. from rally.plugins.openstack import scenario from rally.task import atomic class ScenarioPlugin(scenario.OpenStackScenario): """Sample plugin which lists flavors.""" @atomic.action_timer("list_flavors") def _list_flavors(self): """Sample of usage clients - list flavors >>>>>>> REPLACE <<<<<<< SEARCH self.clients("nova").flavors.list() @base.atomic_action_timer("list_flavors_as_admin") def _list_flavors_as_admin(self): """The same with admin clients.""" self.admin_clients("nova").flavors.list() @base.scenario() def list_flavors(self): """List flavors.""" ======= self.clients("nova").flavors.list() @atomic.action_timer("list_flavors_as_admin") def _list_flavors_as_admin(self): """The same with admin clients.""" self.admin_clients("nova").flavors.list() @scenario.configure() def list_flavors(self): """List flavors.""" >>>>>>> REPLACE
PDXFinder/pdxfinder
36b8fd8e0c040a07d94283745f3ac06d66742c1d
data-model/src/main/java/org/pdxfinder/rdbms/repositories/MappingEntityRepository.java
java
apache-2.0
Create Search Filter for the Persisted Mapping Entities
package org.pdxfinder.rdbms.repositories; import org.pdxfinder.rdbms.dao.MappingEntity; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.stereotype.Repository; import java.util.List; import java.util.Map; @Repository public interface MappingEntityRepository extends JpaRepository<MappingEntity, Long> { MappingEntity findByMappingKey(String mappingKey); List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); }
package org.pdxfinder.rdbms.repositories; import org.pdxfinder.rdbms.dao.MappingEntity; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; import java.util.List; import java.util.Map; /** * Created by abayomi on 25/07/2019. */ @Repository public interface MappingEntityRepository extends JpaRepository<MappingEntity, Long> { MappingEntity findByMappingKey(String mappingKey); List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); @Query(value = "Select distinct me from MappingEntity me JOIN me.mappingValues mv " + "WHERE ((lower(me.entityType) = lower(:entityType)) OR :entityType = '') "+ "AND ((KEY(mv) = :mappingLabel AND mv = :mappingValue) OR :mappingValue = '') " ) Page<MappingEntity> findByMultipleFilters(@Param("entityType") String entityType, @Param("mappingLabel") String mappingLabel, @Param("mappingValue") String mappingValue, Pageable pageable); @Query(value = "select me from MappingEntity me JOIN me.mappingValues mv WHERE KEY(mv) = :dataKey AND mv = :dataValue ") List<MappingEntity> findByAttributeAndValue(@Param("dataKey") String dataKey, @Param("dataValue") String dataValue); }
27
1
3
mixed
--- a/data-model/src/main/java/org/pdxfinder/rdbms/repositories/MappingEntityRepository.java +++ b/data-model/src/main/java/org/pdxfinder/rdbms/repositories/MappingEntityRepository.java @@ -3,4 +3,7 @@ import org.pdxfinder.rdbms.dao.MappingEntity; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; @@ -10,2 +13,6 @@ + +/** + * Created by abayomi on 25/07/2019. + */ @Repository @@ -16,4 +23,23 @@ + List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); - List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); + + @Query(value = "Select distinct me from MappingEntity me JOIN me.mappingValues mv " + + "WHERE ((lower(me.entityType) = lower(:entityType)) OR :entityType = '') "+ + "AND ((KEY(mv) = :mappingLabel AND mv = :mappingValue) OR :mappingValue = '') " + ) + Page<MappingEntity> findByMultipleFilters(@Param("entityType") String entityType, + + @Param("mappingLabel") String mappingLabel, + @Param("mappingValue") String mappingValue, + + Pageable pageable); + + + + @Query(value = "select me from MappingEntity me JOIN me.mappingValues mv WHERE KEY(mv) = :dataKey AND mv = :dataValue ") + List<MappingEntity> findByAttributeAndValue(@Param("dataKey") String dataKey, + @Param("dataValue") String dataValue); + + }
--- a/data-model/src/main/java/org/pdxfinder/rdbms/repositories/MappingEntityRepository.java +++ b/data-model/src/main/java/org/pdxfinder/rdbms/repositories/MappingEntityRepository.java @@ ... @@ import org.pdxfinder.rdbms.dao.MappingEntity; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; @@ ... @@ + +/** + * Created by abayomi on 25/07/2019. + */ @Repository @@ ... @@ + List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); - List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); + + @Query(value = "Select distinct me from MappingEntity me JOIN me.mappingValues mv " + + "WHERE ((lower(me.entityType) = lower(:entityType)) OR :entityType = '') "+ + "AND ((KEY(mv) = :mappingLabel AND mv = :mappingValue) OR :mappingValue = '') " + ) + Page<MappingEntity> findByMultipleFilters(@Param("entityType") String entityType, + + @Param("mappingLabel") String mappingLabel, + @Param("mappingValue") String mappingValue, + + Pageable pageable); + + + + @Query(value = "select me from MappingEntity me JOIN me.mappingValues mv WHERE KEY(mv) = :dataKey AND mv = :dataValue ") + List<MappingEntity> findByAttributeAndValue(@Param("dataKey") String dataKey, + @Param("dataValue") String dataValue); + + }
--- a/data-model/src/main/java/org/pdxfinder/rdbms/repositories/MappingEntityRepository.java +++ b/data-model/src/main/java/org/pdxfinder/rdbms/repositories/MappingEntityRepository.java @@ -3,4 +3,7 @@ CON import org.pdxfinder.rdbms.dao.MappingEntity; ADD import org.springframework.data.domain.Page; ADD import org.springframework.data.domain.Pageable; CON import org.springframework.data.jpa.repository.JpaRepository; CON import org.springframework.data.jpa.repository.Query; ADD import org.springframework.data.repository.query.Param; CON import org.springframework.stereotype.Repository; @@ -10,2 +13,6 @@ CON ADD ADD /** ADD * Created by abayomi on 25/07/2019. ADD */ CON @Repository @@ -16,4 +23,23 @@ CON ADD List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); CON DEL List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); ADD ADD @Query(value = "Select distinct me from MappingEntity me JOIN me.mappingValues mv " + ADD "WHERE ((lower(me.entityType) = lower(:entityType)) OR :entityType = '') "+ ADD "AND ((KEY(mv) = :mappingLabel AND mv = :mappingValue) OR :mappingValue = '') " ADD ) ADD Page<MappingEntity> findByMultipleFilters(@Param("entityType") String entityType, ADD ADD @Param("mappingLabel") String mappingLabel, ADD @Param("mappingValue") String mappingValue, ADD ADD Pageable pageable); ADD ADD ADD ADD @Query(value = "select me from MappingEntity me JOIN me.mappingValues mv WHERE KEY(mv) = :dataKey AND mv = :dataValue ") ADD List<MappingEntity> findByAttributeAndValue(@Param("dataKey") String dataKey, ADD @Param("dataValue") String dataValue); ADD ADD CON }
<<<<<<< SEARCH import org.pdxfinder.rdbms.dao.MappingEntity; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.stereotype.Repository; import java.util.List; import java.util.Map; @Repository public interface MappingEntityRepository extends JpaRepository<MappingEntity, Long> { ======= import org.pdxfinder.rdbms.dao.MappingEntity; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; import java.util.List; import java.util.Map; /** * Created by abayomi on 25/07/2019. */ @Repository public interface MappingEntityRepository extends JpaRepository<MappingEntity, Long> { >>>>>>> REPLACE <<<<<<< SEARCH MappingEntity findByMappingKey(String mappingKey); List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); } ======= MappingEntity findByMappingKey(String mappingKey); List<MappingEntity> findByMappedTermLabel(String mappedTermLabel); @Query(value = "Select distinct me from MappingEntity me JOIN me.mappingValues mv " + "WHERE ((lower(me.entityType) = lower(:entityType)) OR :entityType = '') "+ "AND ((KEY(mv) = :mappingLabel AND mv = :mappingValue) OR :mappingValue = '') " ) Page<MappingEntity> findByMultipleFilters(@Param("entityType") String entityType, @Param("mappingLabel") String mappingLabel, @Param("mappingValue") String mappingValue, Pageable pageable); @Query(value = "select me from MappingEntity me JOIN me.mappingValues mv WHERE KEY(mv) = :dataKey AND mv = :dataValue ") List<MappingEntity> findByAttributeAndValue(@Param("dataKey") String dataKey, @Param("dataValue") String dataValue); } >>>>>>> REPLACE
pennlabs/penn-mobile-android
b0c8c151aaab98c76f5396b93f4b2d8ef2aba40b
PennMobile/src/main/java/com/pennapps/labs/pennmobile/api/DiningAPI.java
java
mit
Update Dining API URL and add URL methods
package com.pennapps.labs.pennmobile.api; public class DiningAPI extends API { public DiningAPI() { super(); BASE_URL = "https://37922702.ngrok.com/"; } }
package com.pennapps.labs.pennmobile.api; import android.util.Log; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONTokener; import java.io.IOException; public class DiningAPI extends API { public DiningAPI() { super(); BASE_URL = "http://api.pennlabs.org:5000/dining/"; } public JSONObject getVenues() { HttpGet httpGet = new HttpGet(BASE_URL + "venues"); try { HttpResponse response = httpClient.execute(httpGet); JSONTokener tokener = new JSONTokener(inputStreamToString (response.getEntity().getContent()).toString()); return new JSONObject(tokener); } catch (IOException e) { Log.v("vivlabs", "" + e); return null; } catch (JSONException e) { Log.v("vivlabs", "" + e); return null; } } public JSONObject getDailyMenu(String hallID) { HttpGet httpGet = new HttpGet(BASE_URL + "daily_menu/" + hallID); try { HttpResponse response = httpClient.execute(httpGet); JSONTokener tokener = new JSONTokener(inputStreamToString (response.getEntity().getContent()).toString()); return new JSONObject(tokener); } catch (IOException e) { Log.v("vivlabs", "" + e); return null; } catch (JSONException e) { Log.v("vivlabs", "" + e); return null; } } public JSONObject getWeeklyMenu(String hallID) { HttpGet httpGet = new HttpGet(BASE_URL + "weekly_menu/" + hallID); try { HttpResponse response = httpClient.execute(httpGet); JSONTokener tokener = new JSONTokener(inputStreamToString (response.getEntity().getContent()).toString()); return new JSONObject(tokener); } catch (IOException e) { Log.v("vivlabs", "" + e); return null; } catch (JSONException e) { Log.v("vivlabs", "" + e); return null; } } }
59
1
2
mixed
--- a/PennMobile/src/main/java/com/pennapps/labs/pennmobile/api/DiningAPI.java +++ b/PennMobile/src/main/java/com/pennapps/labs/pennmobile/api/DiningAPI.java @@ -2,2 +2,12 @@ + +import android.util.Log; + +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.json.JSONException; +import org.json.JSONObject; +import org.json.JSONTokener; + +import java.io.IOException; @@ -7,3 +17,51 @@ super(); - BASE_URL = "https://37922702.ngrok.com/"; + BASE_URL = "http://api.pennlabs.org:5000/dining/"; + } + + public JSONObject getVenues() { + HttpGet httpGet = new HttpGet(BASE_URL + "venues"); + try { + HttpResponse response = httpClient.execute(httpGet); + JSONTokener tokener = new JSONTokener(inputStreamToString + (response.getEntity().getContent()).toString()); + return new JSONObject(tokener); + } catch (IOException e) { + Log.v("vivlabs", "" + e); + return null; + } catch (JSONException e) { + Log.v("vivlabs", "" + e); + return null; + } + } + + public JSONObject getDailyMenu(String hallID) { + HttpGet httpGet = new HttpGet(BASE_URL + "daily_menu/" + hallID); + try { + HttpResponse response = httpClient.execute(httpGet); + JSONTokener tokener = new JSONTokener(inputStreamToString + (response.getEntity().getContent()).toString()); + return new JSONObject(tokener); + } catch (IOException e) { + Log.v("vivlabs", "" + e); + return null; + } catch (JSONException e) { + Log.v("vivlabs", "" + e); + return null; + } + } + + public JSONObject getWeeklyMenu(String hallID) { + HttpGet httpGet = new HttpGet(BASE_URL + "weekly_menu/" + hallID); + try { + HttpResponse response = httpClient.execute(httpGet); + JSONTokener tokener = new JSONTokener(inputStreamToString + (response.getEntity().getContent()).toString()); + return new JSONObject(tokener); + } catch (IOException e) { + Log.v("vivlabs", "" + e); + return null; + } catch (JSONException e) { + Log.v("vivlabs", "" + e); + return null; + } }
--- a/PennMobile/src/main/java/com/pennapps/labs/pennmobile/api/DiningAPI.java +++ b/PennMobile/src/main/java/com/pennapps/labs/pennmobile/api/DiningAPI.java @@ ... @@ + +import android.util.Log; + +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.json.JSONException; +import org.json.JSONObject; +import org.json.JSONTokener; + +import java.io.IOException; @@ ... @@ super(); - BASE_URL = "https://37922702.ngrok.com/"; + BASE_URL = "http://api.pennlabs.org:5000/dining/"; + } + + public JSONObject getVenues() { + HttpGet httpGet = new HttpGet(BASE_URL + "venues"); + try { + HttpResponse response = httpClient.execute(httpGet); + JSONTokener tokener = new JSONTokener(inputStreamToString + (response.getEntity().getContent()).toString()); + return new JSONObject(tokener); + } catch (IOException e) { + Log.v("vivlabs", "" + e); + return null; + } catch (JSONException e) { + Log.v("vivlabs", "" + e); + return null; + } + } + + public JSONObject getDailyMenu(String hallID) { + HttpGet httpGet = new HttpGet(BASE_URL + "daily_menu/" + hallID); + try { + HttpResponse response = httpClient.execute(httpGet); + JSONTokener tokener = new JSONTokener(inputStreamToString + (response.getEntity().getContent()).toString()); + return new JSONObject(tokener); + } catch (IOException e) { + Log.v("vivlabs", "" + e); + return null; + } catch (JSONException e) { + Log.v("vivlabs", "" + e); + return null; + } + } + + public JSONObject getWeeklyMenu(String hallID) { + HttpGet httpGet = new HttpGet(BASE_URL + "weekly_menu/" + hallID); + try { + HttpResponse response = httpClient.execute(httpGet); + JSONTokener tokener = new JSONTokener(inputStreamToString + (response.getEntity().getContent()).toString()); + return new JSONObject(tokener); + } catch (IOException e) { + Log.v("vivlabs", "" + e); + return null; + } catch (JSONException e) { + Log.v("vivlabs", "" + e); + return null; + } }
--- a/PennMobile/src/main/java/com/pennapps/labs/pennmobile/api/DiningAPI.java +++ b/PennMobile/src/main/java/com/pennapps/labs/pennmobile/api/DiningAPI.java @@ -2,2 +2,12 @@ CON ADD ADD import android.util.Log; ADD ADD import org.apache.http.HttpResponse; ADD import org.apache.http.client.methods.HttpGet; ADD import org.json.JSONException; ADD import org.json.JSONObject; ADD import org.json.JSONTokener; ADD ADD import java.io.IOException; CON @@ -7,3 +17,51 @@ CON super(); DEL BASE_URL = "https://37922702.ngrok.com/"; ADD BASE_URL = "http://api.pennlabs.org:5000/dining/"; ADD } ADD ADD public JSONObject getVenues() { ADD HttpGet httpGet = new HttpGet(BASE_URL + "venues"); ADD try { ADD HttpResponse response = httpClient.execute(httpGet); ADD JSONTokener tokener = new JSONTokener(inputStreamToString ADD (response.getEntity().getContent()).toString()); ADD return new JSONObject(tokener); ADD } catch (IOException e) { ADD Log.v("vivlabs", "" + e); ADD return null; ADD } catch (JSONException e) { ADD Log.v("vivlabs", "" + e); ADD return null; ADD } ADD } ADD ADD public JSONObject getDailyMenu(String hallID) { ADD HttpGet httpGet = new HttpGet(BASE_URL + "daily_menu/" + hallID); ADD try { ADD HttpResponse response = httpClient.execute(httpGet); ADD JSONTokener tokener = new JSONTokener(inputStreamToString ADD (response.getEntity().getContent()).toString()); ADD return new JSONObject(tokener); ADD } catch (IOException e) { ADD Log.v("vivlabs", "" + e); ADD return null; ADD } catch (JSONException e) { ADD Log.v("vivlabs", "" + e); ADD return null; ADD } ADD } ADD ADD public JSONObject getWeeklyMenu(String hallID) { ADD HttpGet httpGet = new HttpGet(BASE_URL + "weekly_menu/" + hallID); ADD try { ADD HttpResponse response = httpClient.execute(httpGet); ADD JSONTokener tokener = new JSONTokener(inputStreamToString ADD (response.getEntity().getContent()).toString()); ADD return new JSONObject(tokener); ADD } catch (IOException e) { ADD Log.v("vivlabs", "" + e); ADD return null; ADD } catch (JSONException e) { ADD Log.v("vivlabs", "" + e); ADD return null; ADD } CON }
<<<<<<< SEARCH package com.pennapps.labs.pennmobile.api; public class DiningAPI extends API { public DiningAPI() { super(); BASE_URL = "https://37922702.ngrok.com/"; } } ======= package com.pennapps.labs.pennmobile.api; import android.util.Log; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONTokener; import java.io.IOException; public class DiningAPI extends API { public DiningAPI() { super(); BASE_URL = "http://api.pennlabs.org:5000/dining/"; } public JSONObject getVenues() { HttpGet httpGet = new HttpGet(BASE_URL + "venues"); try { HttpResponse response = httpClient.execute(httpGet); JSONTokener tokener = new JSONTokener(inputStreamToString (response.getEntity().getContent()).toString()); return new JSONObject(tokener); } catch (IOException e) { Log.v("vivlabs", "" + e); return null; } catch (JSONException e) { Log.v("vivlabs", "" + e); return null; } } public JSONObject getDailyMenu(String hallID) { HttpGet httpGet = new HttpGet(BASE_URL + "daily_menu/" + hallID); try { HttpResponse response = httpClient.execute(httpGet); JSONTokener tokener = new JSONTokener(inputStreamToString (response.getEntity().getContent()).toString()); return new JSONObject(tokener); } catch (IOException e) { Log.v("vivlabs", "" + e); return null; } catch (JSONException e) { Log.v("vivlabs", "" + e); return null; } } public JSONObject getWeeklyMenu(String hallID) { HttpGet httpGet = new HttpGet(BASE_URL + "weekly_menu/" + hallID); try { HttpResponse response = httpClient.execute(httpGet); JSONTokener tokener = new JSONTokener(inputStreamToString (response.getEntity().getContent()).toString()); return new JSONObject(tokener); } catch (IOException e) { Log.v("vivlabs", "" + e); return null; } catch (JSONException e) { Log.v("vivlabs", "" + e); return null; } } } >>>>>>> REPLACE
99designs/colorific
64e515f87fa47f44ed8c18e9c9edc76fee49ce84
setup.py
python
isc
Use patched Pillow as a packaging dependency.
# -*- coding: utf-8 -*- # # setup.py # colorific # """ Package information for colorific. """ import sys # check for the supported Python version version = tuple(sys.version_info[:2]) if version != (2, 7): sys.stderr.write('colorific requires Python 2.7 (you have %d.%d)\n' %\ version) sys.stderr.flush() sys.exit(1) import os from setuptools import setup readme = os.path.join(os.path.dirname(__file__), 'README.md') setup( name='colorific', version='0.2.0', description='Automatic color palette detection', long_description=open(readme).read(), author='Lars Yencken', author_email='[email protected]', url='http://github.com/99designs/colorific', py_modules=['colorific'], install_requires=[ 'PIL>=1.1.6', 'colormath>=1.0.8', 'numpy>=1.6.1', ], license='ISC', entry_points={ 'console_scripts': [ 'colorific = colorific:main', ], }, )
# -*- coding: utf-8 -*- # # setup.py # colorific # """ Package information for colorific. """ import sys # check for the supported Python version version = tuple(sys.version_info[:2]) if version != (2, 7): sys.stderr.write('colorific requires Python 2.7 (you have %d.%d)\n' %\ version) sys.stderr.flush() sys.exit(1) import os from setuptools import setup readme = os.path.join(os.path.dirname(__file__), 'README.md') setup( name='colorific', version='0.2.0', description='Automatic color palette detection', long_description=open(readme).read(), author='Lars Yencken', author_email='[email protected]', url='http://github.com/99designs/colorific', py_modules=['colorific'], install_requires=[ 'Pillow==1.7.8', 'colormath>=1.0.8', 'numpy>=1.6.1', ], dependency_links=[ 'http://github.com/larsyencken/Pillow/tarball/master#egg=Pillow-1.7.8', ], license='ISC', entry_points={ 'console_scripts': [ 'colorific = colorific:main', ], }, )
4
1
1
mixed
--- a/setup.py +++ b/setup.py @@ -35,5 +35,8 @@ install_requires=[ - 'PIL>=1.1.6', + 'Pillow==1.7.8', 'colormath>=1.0.8', 'numpy>=1.6.1', + ], + dependency_links=[ + 'http://github.com/larsyencken/Pillow/tarball/master#egg=Pillow-1.7.8', ],
--- a/setup.py +++ b/setup.py @@ ... @@ install_requires=[ - 'PIL>=1.1.6', + 'Pillow==1.7.8', 'colormath>=1.0.8', 'numpy>=1.6.1', + ], + dependency_links=[ + 'http://github.com/larsyencken/Pillow/tarball/master#egg=Pillow-1.7.8', ],
--- a/setup.py +++ b/setup.py @@ -35,5 +35,8 @@ CON install_requires=[ DEL 'PIL>=1.1.6', ADD 'Pillow==1.7.8', CON 'colormath>=1.0.8', CON 'numpy>=1.6.1', ADD ], ADD dependency_links=[ ADD 'http://github.com/larsyencken/Pillow/tarball/master#egg=Pillow-1.7.8', CON ],
<<<<<<< SEARCH py_modules=['colorific'], install_requires=[ 'PIL>=1.1.6', 'colormath>=1.0.8', 'numpy>=1.6.1', ], license='ISC', ======= py_modules=['colorific'], install_requires=[ 'Pillow==1.7.8', 'colormath>=1.0.8', 'numpy>=1.6.1', ], dependency_links=[ 'http://github.com/larsyencken/Pillow/tarball/master#egg=Pillow-1.7.8', ], license='ISC', >>>>>>> REPLACE
denkmal/denkmal.org
fa5ac37b0a17c68fc56f7cd93d2406a70c9dac7c
library/Denkmal/library/Denkmal/Component/HeaderBar.js
javascript
mit
Hide weekday menu on navigate:start
/** * @class Denkmal_Component_HeaderBar * @extends Denkmal_Component_Abstract */ var Denkmal_Component_HeaderBar = Denkmal_Component_Abstract.extend({ /** @type String */ _class: 'Denkmal_Component_HeaderBar', events: { 'click .menu.dates a': function() { var state = !this.el.hasAttribute('data-weekday-menu'); this.setWeekdayMenuVisible(state); } }, /** * @param {Boolean} state */ setWeekdayMenuVisible: function(state) { var callback = function(state) { $(this).attr('data-weekday-menu', state ? '' : null); }; if (state) { this.$el.toggleModal('open', callback); } else { this.$el.toggleModal('close', callback); } }, /** * @param {Boolean} state */ setNavigationIndicationVisible: function(state) { this.$el.attr('data-navigation-indication', state ? '' : null); } });
/** * @class Denkmal_Component_HeaderBar * @extends Denkmal_Component_Abstract */ var Denkmal_Component_HeaderBar = Denkmal_Component_Abstract.extend({ /** @type String */ _class: 'Denkmal_Component_HeaderBar', events: { 'click .menu.dates a': function() { if (!this.getWeekdayMenuVisible()) { this.setWeekdayMenuVisible(true); return false; } } }, appEvents: { 'navigate:start': function() { this.setWeekdayMenuVisible(false); } }, /** * @param {Boolean} state */ setWeekdayMenuVisible: function(state) { var callback = function(state) { $(this).attr('data-weekday-menu', state ? '' : null); }; if (state) { this.$el.toggleModal('open', callback); } else { this.$el.toggleModal('close', callback); } }, /** * @returns {boolean} */ getWeekdayMenuVisible: function() { return this.el.hasAttribute('data-weekday-menu'); }, /** * @param {Boolean} state */ setNavigationIndicationVisible: function(state) { this.$el.attr('data-navigation-indication', state ? '' : null); } });
17
2
2
mixed
--- a/library/Denkmal/library/Denkmal/Component/HeaderBar.js +++ b/library/Denkmal/library/Denkmal/Component/HeaderBar.js @@ -11,4 +11,12 @@ 'click .menu.dates a': function() { - var state = !this.el.hasAttribute('data-weekday-menu'); - this.setWeekdayMenuVisible(state); + if (!this.getWeekdayMenuVisible()) { + this.setWeekdayMenuVisible(true); + return false; + } + } + }, + + appEvents: { + 'navigate:start': function() { + this.setWeekdayMenuVisible(false); } @@ -31,2 +39,9 @@ /** + * @returns {boolean} + */ + getWeekdayMenuVisible: function() { + return this.el.hasAttribute('data-weekday-menu'); + }, + + /** * @param {Boolean} state
--- a/library/Denkmal/library/Denkmal/Component/HeaderBar.js +++ b/library/Denkmal/library/Denkmal/Component/HeaderBar.js @@ ... @@ 'click .menu.dates a': function() { - var state = !this.el.hasAttribute('data-weekday-menu'); - this.setWeekdayMenuVisible(state); + if (!this.getWeekdayMenuVisible()) { + this.setWeekdayMenuVisible(true); + return false; + } + } + }, + + appEvents: { + 'navigate:start': function() { + this.setWeekdayMenuVisible(false); } @@ ... @@ /** + * @returns {boolean} + */ + getWeekdayMenuVisible: function() { + return this.el.hasAttribute('data-weekday-menu'); + }, + + /** * @param {Boolean} state
--- a/library/Denkmal/library/Denkmal/Component/HeaderBar.js +++ b/library/Denkmal/library/Denkmal/Component/HeaderBar.js @@ -11,4 +11,12 @@ CON 'click .menu.dates a': function() { DEL var state = !this.el.hasAttribute('data-weekday-menu'); DEL this.setWeekdayMenuVisible(state); ADD if (!this.getWeekdayMenuVisible()) { ADD this.setWeekdayMenuVisible(true); ADD return false; ADD } ADD } ADD }, ADD ADD appEvents: { ADD 'navigate:start': function() { ADD this.setWeekdayMenuVisible(false); CON } @@ -31,2 +39,9 @@ CON /** ADD * @returns {boolean} ADD */ ADD getWeekdayMenuVisible: function() { ADD return this.el.hasAttribute('data-weekday-menu'); ADD }, ADD ADD /** CON * @param {Boolean} state
<<<<<<< SEARCH events: { 'click .menu.dates a': function() { var state = !this.el.hasAttribute('data-weekday-menu'); this.setWeekdayMenuVisible(state); } }, ======= events: { 'click .menu.dates a': function() { if (!this.getWeekdayMenuVisible()) { this.setWeekdayMenuVisible(true); return false; } } }, appEvents: { 'navigate:start': function() { this.setWeekdayMenuVisible(false); } }, >>>>>>> REPLACE <<<<<<< SEARCH /** * @param {Boolean} state */ ======= /** * @returns {boolean} */ getWeekdayMenuVisible: function() { return this.el.hasAttribute('data-weekday-menu'); }, /** * @param {Boolean} state */ >>>>>>> REPLACE
cfpb/capital-framework
8ef7cb57c0473a2f383a7139993e9a839d835652
src/cf-tables/src/cf-table-row-links.js
javascript
cc0-1.0
Remove table row links init method
/* ========================================================================== Table Row Links Mixin for adding row link click functionality to table organism. ========================================================================== */ 'use strict'; var closest = require( 'atomic-component/src/utilities/dom-closest' ).closest; var TableRowLinks = { events: { 'click tbody tr': 'onRowLinkClick' }, ui: { base: '.o-table__row-links' }, /** * Handle a click of the table. * * @param {Object} event Mouse event for click on the table. */ onRowLinkClick: function( event ) { var target = event.target; if( target && target.tagName === 'A' ) { return } target = closest( event.target, 'tr' ); var link = target.querySelector( 'a' ); if( link ) window.location = link.getAttribute( 'href' ); }, /** * Handle initilization of Table Row Links. Added for standalone * use cases. * */ init: function() { var elements = document.querySelector( TableRowLinks.ui.base ); for ( var i = 0; i < elements.length; ++i ) { if( elements[i].hasAttribute( 'data-bound' ) === false ) { elements[i].addEventListener( 'click', table, TableRowLinks.onRowLinkClick ); } } } }; module.exports = TableRowLinks;
/* ========================================================================== Table Row Links Mixin for adding row link click functionality to table organism. ========================================================================== */ 'use strict'; var closest = require( 'atomic-component/src/utilities/dom-closest' ).closest; var TableRowLinks = { events: { 'click tbody tr': 'onRowLinkClick' }, ui: { base: '.o-table__row-links' }, /** * Handle a click of the table. * * @param {Object} event Mouse event for click on the table. */ onRowLinkClick: function( event ) { var target = event.target; if( target && target.tagName === 'A' ) { return } target = closest( event.target, 'tr' ); var link = target.querySelector( 'a' ); if( link ) window.location = link.getAttribute( 'href' ); } }; module.exports = TableRowLinks;
0
15
1
del_only
--- a/src/cf-tables/src/cf-table-row-links.js +++ b/src/cf-tables/src/cf-table-row-links.js @@ -35,17 +35,2 @@ if( link ) window.location = link.getAttribute( 'href' ); - }, - - /** - * Handle initilization of Table Row Links. Added for standalone - * use cases. - * - */ - init: function() { - var elements = document.querySelector( TableRowLinks.ui.base ); - for ( var i = 0; i < elements.length; ++i ) { - if( elements[i].hasAttribute( 'data-bound' ) === false ) { - elements[i].addEventListener( 'click', table, - TableRowLinks.onRowLinkClick ); - } - } }
--- a/src/cf-tables/src/cf-table-row-links.js +++ b/src/cf-tables/src/cf-table-row-links.js @@ ... @@ if( link ) window.location = link.getAttribute( 'href' ); - }, - - /** - * Handle initilization of Table Row Links. Added for standalone - * use cases. - * - */ - init: function() { - var elements = document.querySelector( TableRowLinks.ui.base ); - for ( var i = 0; i < elements.length; ++i ) { - if( elements[i].hasAttribute( 'data-bound' ) === false ) { - elements[i].addEventListener( 'click', table, - TableRowLinks.onRowLinkClick ); - } - } }
--- a/src/cf-tables/src/cf-table-row-links.js +++ b/src/cf-tables/src/cf-table-row-links.js @@ -35,17 +35,2 @@ CON if( link ) window.location = link.getAttribute( 'href' ); DEL }, DEL DEL /** DEL * Handle initilization of Table Row Links. Added for standalone DEL * use cases. DEL * DEL */ DEL init: function() { DEL var elements = document.querySelector( TableRowLinks.ui.base ); DEL for ( var i = 0; i < elements.length; ++i ) { DEL if( elements[i].hasAttribute( 'data-bound' ) === false ) { DEL elements[i].addEventListener( 'click', table, DEL TableRowLinks.onRowLinkClick ); DEL } DEL } CON }
<<<<<<< SEARCH var link = target.querySelector( 'a' ); if( link ) window.location = link.getAttribute( 'href' ); }, /** * Handle initilization of Table Row Links. Added for standalone * use cases. * */ init: function() { var elements = document.querySelector( TableRowLinks.ui.base ); for ( var i = 0; i < elements.length; ++i ) { if( elements[i].hasAttribute( 'data-bound' ) === false ) { elements[i].addEventListener( 'click', table, TableRowLinks.onRowLinkClick ); } } } }; ======= var link = target.querySelector( 'a' ); if( link ) window.location = link.getAttribute( 'href' ); } }; >>>>>>> REPLACE
tipsy/javalin
81eea95c5885a08fd0934ed323c38f307bc65336
src/test/java/io/javalin/TestLifecycleEvents.kt
kotlin
apache-2.0
[tests] Add test for ws handler added
/* * Javalin - https://javalin.io * Copyright 2017 David Åse * Licensed under Apache 2.0: https://github.com/tipsy/javalin/blob/master/LICENSE * */ package io.javalin import org.assertj.core.api.Assertions.assertThat import org.junit.Test class TestLifecycleEvents { @Test fun `life cycle events work`() { var log = "" Javalin.create().apply { on.serverStarting { log += "Starting" } on.serverStarted { log += "Started" } on.serverStopping { log += "Stopping" } on.serverStopping { log += "Stopping" } on.serverStopping { log += "Stopping" } on.serverStopped { log += "Stopped" } }.start(0).stop() assertThat(log).isEqualTo("StartingStartedStoppingStoppingStoppingStopped") } @Test fun `handlerAdded event works`() = TestUtil.test { app, http -> var log = "" app.on.handlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.on.handlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.get("/test-path") {} assertThat(log).isEqualTo("/test-path/test-path") } }
/* * Javalin - https://javalin.io * Copyright 2017 David Åse * Licensed under Apache 2.0: https://github.com/tipsy/javalin/blob/master/LICENSE * */ package io.javalin import org.assertj.core.api.Assertions.assertThat import org.junit.Test class TestLifecycleEvents { @Test fun `life cycle events work`() { var log = "" Javalin.create().apply { on.serverStarting { log += "Starting" } on.serverStarted { log += "Started" } on.serverStopping { log += "Stopping" } on.serverStopping { log += "Stopping" } on.serverStopping { log += "Stopping" } on.serverStopped { log += "Stopped" } }.start(0).stop() assertThat(log).isEqualTo("StartingStartedStoppingStoppingStoppingStopped") } @Test fun `handlerAdded event works`() = TestUtil.test { app, http -> var log = "" app.on.handlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.on.handlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.get("/test-path") {} assertThat(log).isEqualTo("/test-path/test-path") } @Test fun `wsHandlerAdded event works`() = TestUtil.test { app, http -> var log = "" app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.ws("/test-path-ws") {} assertThat(log).isEqualTo("/test-path-ws/test-path-ws") } }
9
0
1
add_only
--- a/src/test/java/io/javalin/TestLifecycleEvents.kt +++ b/src/test/java/io/javalin/TestLifecycleEvents.kt @@ -37,2 +37,11 @@ + @Test + fun `wsHandlerAdded event works`() = TestUtil.test { app, http -> + var log = "" + app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } + app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } + app.ws("/test-path-ws") {} + assertThat(log).isEqualTo("/test-path-ws/test-path-ws") + } + }
--- a/src/test/java/io/javalin/TestLifecycleEvents.kt +++ b/src/test/java/io/javalin/TestLifecycleEvents.kt @@ ... @@ + @Test + fun `wsHandlerAdded event works`() = TestUtil.test { app, http -> + var log = "" + app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } + app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } + app.ws("/test-path-ws") {} + assertThat(log).isEqualTo("/test-path-ws/test-path-ws") + } + }
--- a/src/test/java/io/javalin/TestLifecycleEvents.kt +++ b/src/test/java/io/javalin/TestLifecycleEvents.kt @@ -37,2 +37,11 @@ CON ADD @Test ADD fun `wsHandlerAdded event works`() = TestUtil.test { app, http -> ADD var log = "" ADD app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } ADD app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } ADD app.ws("/test-path-ws") {} ADD assertThat(log).isEqualTo("/test-path-ws/test-path-ws") ADD } ADD CON }
<<<<<<< SEARCH } } ======= } @Test fun `wsHandlerAdded event works`() = TestUtil.test { app, http -> var log = "" app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.on.wsHandlerAdded { handlerMetaInfo -> log += handlerMetaInfo.path } app.ws("/test-path-ws") {} assertThat(log).isEqualTo("/test-path-ws/test-path-ws") } } >>>>>>> REPLACE
C4K3/Portals
167e2cce98fea152f27cce2358b490c189f1f344
src/Portals.java
java
cc0-1.0
Make justTeleportedEntities a HashSet instead of ArrayList
package net.simpvp.Portals;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.bukkit.entity.Entity;
import org.bukkit.plugin.java.JavaPlugin;

public class Portals extends JavaPlugin {

	public static JavaPlugin instance;

	public static List<UUID> justTeleportedEntities = new ArrayList<UUID>();

	public void onEnable() {
		instance = this;

		/* Check if this plugin's directory exists, if not create it */
		File dir = new File("plugins/Portals");
		if ( !dir.exists() ) {
			dir.mkdir();
		}

		getServer().getPluginManager().registerEvents(new BlockBreak(), this);
		getServer().getPluginManager().registerEvents(new BlockPlace(), this);
		getServer().getPluginManager().registerEvents(new PlayerDeath(), this);
		getServer().getPluginManager().registerEvents(new PlayerToggleSneak(), this);

		/* Remove comment to enable minecart detection through portals */
		/* getServer().getPluginManager().registerEvents(new BlockRedstone(), this); */

		SQLite.connect();
	}

	public void onDisable() {
		SQLite.close();
	}

}
package net.simpvp.Portals;

import java.io.File;
import java.util.UUID;
import java.util.HashSet;

import org.bukkit.entity.Entity;
import org.bukkit.plugin.java.JavaPlugin;

public class Portals extends JavaPlugin {

	public static JavaPlugin instance;

	public static HashSet<UUID> justTeleportedEntities = new HashSet<UUID>();

	public void onEnable() {
		instance = this;

		/* Check if this plugin's directory exists, if not create it */
		File dir = new File("plugins/Portals");
		if ( !dir.exists() ) {
			dir.mkdir();
		}

		getServer().getPluginManager().registerEvents(new BlockBreak(), this);
		getServer().getPluginManager().registerEvents(new BlockPlace(), this);
		getServer().getPluginManager().registerEvents(new PlayerDeath(), this);
		getServer().getPluginManager().registerEvents(new PlayerToggleSneak(), this);

		/* Remove comment to enable minecart detection through portals */
		/* getServer().getPluginManager().registerEvents(new BlockRedstone(), this); */

		SQLite.connect();
	}

	public void onDisable() {
		SQLite.close();
	}

}
2
3
2
mixed
--- a/src/Portals.java
+++ b/src/Portals.java
@@ -3,5 +3,4 @@
 import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.UUID;
+import java.util.HashSet;
 
@@ -14,3 +13,3 @@
 
-	public static List<UUID> justTeleportedEntities = new ArrayList<UUID>();
+	public static HashSet<UUID> justTeleportedEntities = new HashSet<UUID>();
 
--- a/src/Portals.java
+++ b/src/Portals.java
@@ ... @@
 import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.UUID;
+import java.util.HashSet;
 
@@ ... @@
 
-	public static List<UUID> justTeleportedEntities = new ArrayList<UUID>();
+	public static HashSet<UUID> justTeleportedEntities = new HashSet<UUID>();
 
--- a/src/Portals.java
+++ b/src/Portals.java
@@ -3,5 +3,4 @@
CON import java.io.File;
DEL import java.util.ArrayList;
DEL import java.util.List;
CON import java.util.UUID;
ADD import java.util.HashSet;
CON 
@@ -14,3 +13,3 @@
CON 
DEL 	public static List<UUID> justTeleportedEntities = new ArrayList<UUID>();
ADD 	public static HashSet<UUID> justTeleportedEntities = new HashSet<UUID>();
CON 
<<<<<<< SEARCH
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.bukkit.entity.Entity;
=======
import java.io.File;
import java.util.UUID;
import java.util.HashSet;

import org.bukkit.entity.Entity;
>>>>>>> REPLACE

<<<<<<< SEARCH
	public static JavaPlugin instance;

	public static List<UUID> justTeleportedEntities = new ArrayList<UUID>();

	public void onEnable() {
=======
	public static JavaPlugin instance;

	public static HashSet<UUID> justTeleportedEntities = new HashSet<UUID>();

	public void onEnable() {
>>>>>>> REPLACE
justinas/gettext
e487626b6a98f96fab4948e59a7d41aca2a4032f
src/parse_options.rs
rust
mit
Remove Debug implementation for now. DebugStruct only stabilized in 1.2.0
extern crate encoding;

use std::fmt;

use self::encoding::types::EncodingRef;

/// ParseOptions allows setting options for parsing MO catalogs.
pub struct ParseOptions {
    force_encoding: Option<EncodingRef>,
}

impl ParseOptions {
    /// Returns a new instance of ParseOptions with default options.
    pub fn new() -> Self {
        ParseOptions { force_encoding: None }
    }

    /// Forces a use of a specific encoding
    /// when parsing strings from a catalog.
    /// If this option is not enabled,
    /// the parser tries to use the encoding specified in the metadata
    /// or UTF-8 if metadata is non-existent.
    pub fn force_encoding(&mut self, encoding: EncodingRef) -> &mut Self {
        self.force_encoding = Some(encoding);
        self
    }
}

// Cannot derive as Encoding does not implement Debug.
impl fmt::Debug for ParseOptions {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let mut debug_fmt = fmt.debug_struct("ParseOptions");
        match self.force_encoding {
            opt @ Some(_) => {
                debug_fmt.field("force_encoding", &opt.map(|e| e.name()));
            }
            opt @ None => {
                // HACK: convert Option<EncodingRef> to Option<()> with map()
                // to get a Debug impl.
                debug_fmt.field("force_encoding", &opt.map(|_| ()));
            }
        }
        debug_fmt.finish()
    }
}
extern crate encoding;

use std::fmt;

use self::encoding::types::EncodingRef;

/// ParseOptions allows setting options for parsing MO catalogs.
#[allow(missing_debug_implementations)]
pub struct ParseOptions {
    force_encoding: Option<EncodingRef>,
}

impl ParseOptions {
    /// Returns a new instance of ParseOptions with default options.
    pub fn new() -> Self {
        ParseOptions { force_encoding: None }
    }

    /// Forces a use of a specific encoding
    /// when parsing strings from a catalog.
    /// If this option is not enabled,
    /// the parser tries to use the encoding specified in the metadata
    /// or UTF-8 if metadata is non-existent.
    pub fn force_encoding(&mut self, encoding: EncodingRef) -> &mut Self {
        self.force_encoding = Some(encoding);
        self
    }
}
1
18
2
mixed
--- a/src/parse_options.rs
+++ b/src/parse_options.rs
@@ -7,2 +7,3 @@
 /// ParseOptions allows setting options for parsing MO catalogs.
+#[allow(missing_debug_implementations)]
 pub struct ParseOptions {
@@ -27,19 +28 @@
 }
-
-// Cannot derive as Encoding does not implement Debug.
-impl fmt::Debug for ParseOptions {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
-        let mut debug_fmt = fmt.debug_struct("ParseOptions");
-        match self.force_encoding {
-            opt @ Some(_) => {
-                debug_fmt.field("force_encoding", &opt.map(|e| e.name()));
-            }
-            opt @ None => {
-                // HACK: convert Option<EncodingRef> to Option<()> with map()
-                // to get a Debug impl.
-                debug_fmt.field("force_encoding", &opt.map(|_| ()));
-            }
-        }
-        debug_fmt.finish()
-    }
-}
--- a/src/parse_options.rs
+++ b/src/parse_options.rs
@@ ... @@
 /// ParseOptions allows setting options for parsing MO catalogs.
+#[allow(missing_debug_implementations)]
 pub struct ParseOptions {
@@ ... @@
 }
-
-// Cannot derive as Encoding does not implement Debug.
-impl fmt::Debug for ParseOptions {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
-        let mut debug_fmt = fmt.debug_struct("ParseOptions");
-        match self.force_encoding {
-            opt @ Some(_) => {
-                debug_fmt.field("force_encoding", &opt.map(|e| e.name()));
-            }
-            opt @ None => {
-                // HACK: convert Option<EncodingRef> to Option<()> with map()
-                // to get a Debug impl.
-                debug_fmt.field("force_encoding", &opt.map(|_| ()));
-            }
-        }
-        debug_fmt.finish()
-    }
-}
--- a/src/parse_options.rs
+++ b/src/parse_options.rs
@@ -7,2 +7,3 @@
CON /// ParseOptions allows setting options for parsing MO catalogs.
ADD #[allow(missing_debug_implementations)]
CON pub struct ParseOptions {
@@ -27,19 +28 @@
CON }
DEL 
DEL // Cannot derive as Encoding does not implement Debug.
DEL impl fmt::Debug for ParseOptions {
DEL     fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
DEL         let mut debug_fmt = fmt.debug_struct("ParseOptions");
DEL         match self.force_encoding {
DEL             opt @ Some(_) => {
DEL                 debug_fmt.field("force_encoding", &opt.map(|e| e.name()));
DEL             }
DEL             opt @ None => {
DEL                 // HACK: convert Option<EncodingRef> to Option<()> with map()
DEL                 // to get a Debug impl.
DEL                 debug_fmt.field("force_encoding", &opt.map(|_| ()));
DEL             }
DEL         }
DEL         debug_fmt.finish()
DEL     }
DEL }
<<<<<<< SEARCH
/// ParseOptions allows setting options for parsing MO catalogs.
pub struct ParseOptions {
    force_encoding: Option<EncodingRef>,
=======
/// ParseOptions allows setting options for parsing MO catalogs.
#[allow(missing_debug_implementations)]
pub struct ParseOptions {
    force_encoding: Option<EncodingRef>,
>>>>>>> REPLACE

<<<<<<< SEARCH
    }
}

// Cannot derive as Encoding does not implement Debug.
impl fmt::Debug for ParseOptions {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let mut debug_fmt = fmt.debug_struct("ParseOptions");
        match self.force_encoding {
            opt @ Some(_) => {
                debug_fmt.field("force_encoding", &opt.map(|e| e.name()));
            }
            opt @ None => {
                // HACK: convert Option<EncodingRef> to Option<()> with map()
                // to get a Debug impl.
                debug_fmt.field("force_encoding", &opt.map(|_| ()));
            }
        }
        debug_fmt.finish()
    }
}
=======
    }
}
>>>>>>> REPLACE