| column | dtype | range / cardinality |
|---|---|---|
| repo | string | lengths 8–50 |
| commit | string | length 40 |
| path | string | lengths 5–171 |
| lang | string | 5 classes |
| license | string | 13 classes |
| message | string | lengths 21–1.33k |
| old_code | string | lengths 15–2.4k |
| new_code | string | lengths 140–2.61k |
| n_added | int64 | 0–81 |
| n_removed | int64 | 0–58 |
| n_hunks | int64 | 1–8 |
| change_kind | string | 3 classes |
| udiff | string | lengths 88–3.33k |
| udiff-h | string | lengths 85–3.32k |
| udiff-l | string | lengths 95–3.57k |
| search-replace | string | lengths 89–3.36k |
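The records that follow use exactly these columns. As a minimal sketch, assuming the records have been exported to a local JSON Lines file (the name `code_edits.jsonl` is a hypothetical placeholder), they could be loaded and inspected with the Hugging Face `datasets` library like so:

```python
# Minimal sketch: load rows with the columns listed above from a local JSONL export.
# The file name "code_edits.jsonl" is a hypothetical placeholder, not part of the dataset.
from datasets import load_dataset

ds = load_dataset("json", data_files="code_edits.jsonl", split="train")

print(ds.column_names)   # repo, commit, path, lang, license, message, old_code, new_code, ...

row = ds[0]
print(row["repo"], row["path"], row["change_kind"])
print(f'+{row["n_added"]} -{row["n_removed"]} in {row["n_hunks"]} hunk(s)')
print(row["udiff"])      # the unified diff between old_code and new_code
```

Each record below lists its fields in the column order of the table above, with multi-line fields (code and diffs) shown verbatim.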
pascalduez/react-module-boilerplate
|
37c5841c8d92254144b24dfff8cca8b2735f95aa
|
.storybook/webpack.config.js
|
javascript
|
unlicense
|
Make Storybook work with babel 7
|
/* eslint-disable no-param-reassign, global-require */
module.exports = baseConfig => {
baseConfig.module.rules.push({
test: /\.css$/,
use: [
{
loader: 'style-loader',
},
{
loader: 'css-loader',
options: {
modules: true,
localIdentName: '[name]-[local]_[hash:base64:5]',
importLoaders: 1,
},
},
{
loader: 'postcss-loader',
options: {
plugins: [require('../src/theme')({ appendVariables: true })],
},
},
],
});
return baseConfig;
};
|
/* eslint-disable no-param-reassign, global-require */
module.exports = baseConfig => {
// Replace storybook baseConfig rule.
baseConfig.module.rules.splice(0, 1, {
test: /\.js$/,
exclude: /node_modules/,
use: [
{
loader: 'babel-loader',
options: {
presets: ['./babel.config.js'],
},
},
],
});
baseConfig.module.rules.push({
test: /\.css$/,
use: [
{
loader: 'style-loader',
},
{
loader: 'css-loader',
options: {
modules: true,
localIdentName: '[name]-[local]_[hash:base64:5]',
importLoaders: 1,
},
},
{
loader: 'postcss-loader',
options: {
plugins: [require('../src/theme')({ appendVariables: true })],
},
},
],
});
return baseConfig;
};
| 14
| 0
| 1
|
add_only
|
--- a/.storybook/webpack.config.js
+++ b/.storybook/webpack.config.js
@@ -3,2 +3,16 @@
module.exports = baseConfig => {
+ // Replace storybook baseConfig rule.
+ baseConfig.module.rules.splice(0, 1, {
+ test: /\.js$/,
+ exclude: /node_modules/,
+ use: [
+ {
+ loader: 'babel-loader',
+ options: {
+ presets: ['./babel.config.js'],
+ },
+ },
+ ],
+ });
+
baseConfig.module.rules.push({
|
--- a/.storybook/webpack.config.js
+++ b/.storybook/webpack.config.js
@@ ... @@
module.exports = baseConfig => {
+ // Replace storybook baseConfig rule.
+ baseConfig.module.rules.splice(0, 1, {
+ test: /\.js$/,
+ exclude: /node_modules/,
+ use: [
+ {
+ loader: 'babel-loader',
+ options: {
+ presets: ['./babel.config.js'],
+ },
+ },
+ ],
+ });
+
baseConfig.module.rules.push({
|
--- a/.storybook/webpack.config.js
+++ b/.storybook/webpack.config.js
@@ -3,2 +3,16 @@
CON module.exports = baseConfig => {
ADD // Replace storybook baseConfig rule.
ADD baseConfig.module.rules.splice(0, 1, {
ADD test: /\.js$/,
ADD exclude: /node_modules/,
ADD use: [
ADD {
ADD loader: 'babel-loader',
ADD options: {
ADD presets: ['./babel.config.js'],
ADD },
ADD },
ADD ],
ADD });
ADD
CON baseConfig.module.rules.push({
|
<<<<<<< SEARCH
module.exports = baseConfig => {
baseConfig.module.rules.push({
test: /\.css$/,
=======
module.exports = baseConfig => {
// Replace storybook baseConfig rule.
baseConfig.module.rules.splice(0, 1, {
test: /\.js$/,
exclude: /node_modules/,
use: [
{
loader: 'babel-loader',
options: {
presets: ['./babel.config.js'],
},
},
],
});
baseConfig.module.rules.push({
test: /\.css$/,
>>>>>>> REPLACE
|
dmitriiabramov/esfmt
|
66f4b95e5c9ff60668df9e9446794ef1609f702f
|
__tests__/code_snippets/objects.js
|
javascript
|
bsd-3-clause
|
Add test for multiline object destructing
|
/* eslint-disable */
// input: property access
a['a'];
a[a];
a[b()];
a[b[c[0]]];
'abc'[1];
// output:
a['a'];
a[a];
a[b()];
a[b[c[0]]];
'abc'[1];
// input: declaration
// config: {"max-len": 30}
let a = {
b: function() {
return c;
},
c: a.b.c.d.e.f,
d: 1,
e: 'abc',
f: this,
[a]: undefined
};
// output:
let a = {
b: function() {
return c;
},
c: a.b.c.d.e.f,
d: 1,
e: 'abc',
f: this,
[a]: undefined
};
// input: destructuring
let a = {b, c, d};
// output:
let a = {b, c, d};
// input: one line objects
let a = {a: 1, b: 2};
// output:
let a = {a: 1, b: 2};
|
/* eslint-disable */
// input: property access
a['a'];
a[a];
a[b()];
a[b[c[0]]];
'abc'[1];
// output:
a['a'];
a[a];
a[b()];
a[b[c[0]]];
'abc'[1];
// input: declaration
// config: {"max-len": 30}
let a = {
b: function() {
return c;
},
c: a.b.c.d.e.f,
d: 1,
e: 'abc',
f: this,
[a]: undefined
};
// output:
let a = {
b: function() {
return c;
},
c: a.b.c.d.e.f,
d: 1,
e: 'abc',
f: this,
[a]: undefined
};
// input: destructuring
let a = {b, c, d};
// output:
let a = {b, c, d};
// input: multiline destructuring
// config: {"max-len": 30}
let a = {
aaaaaaaaaa,
bbbbbbbbbb,
dddddddddd
};
// output:
let a = {
aaaaaaaaaa,
bbbbbbbbbb,
dddddddddd
};
// input: one line objects
let a = {a: 1, b: 2};
// output:
let a = {a: 1, b: 2};
| 14
| 0
| 1
|
add_only
|
--- a/__tests__/code_snippets/objects.js
+++ b/__tests__/code_snippets/objects.js
@@ -45,2 +45,16 @@
+// input: multiline destructuring
+// config: {"max-len": 30}
+let a = {
+ aaaaaaaaaa,
+ bbbbbbbbbb,
+ dddddddddd
+};
+// output:
+let a = {
+ aaaaaaaaaa,
+ bbbbbbbbbb,
+ dddddddddd
+};
+
// input: one line objects
|
--- a/__tests__/code_snippets/objects.js
+++ b/__tests__/code_snippets/objects.js
@@ ... @@
+// input: multiline destructuring
+// config: {"max-len": 30}
+let a = {
+ aaaaaaaaaa,
+ bbbbbbbbbb,
+ dddddddddd
+};
+// output:
+let a = {
+ aaaaaaaaaa,
+ bbbbbbbbbb,
+ dddddddddd
+};
+
// input: one line objects
|
--- a/__tests__/code_snippets/objects.js
+++ b/__tests__/code_snippets/objects.js
@@ -45,2 +45,16 @@
CON
ADD // input: multiline destructuring
ADD // config: {"max-len": 30}
ADD let a = {
ADD aaaaaaaaaa,
ADD bbbbbbbbbb,
ADD dddddddddd
ADD };
ADD // output:
ADD let a = {
ADD aaaaaaaaaa,
ADD bbbbbbbbbb,
ADD dddddddddd
ADD };
ADD
CON // input: one line objects
|
<<<<<<< SEARCH
let a = {b, c, d};
// input: one line objects
let a = {a: 1, b: 2};
=======
let a = {b, c, d};
// input: multiline destructuring
// config: {"max-len": 30}
let a = {
aaaaaaaaaa,
bbbbbbbbbb,
dddddddddd
};
// output:
let a = {
aaaaaaaaaa,
bbbbbbbbbb,
dddddddddd
};
// input: one line objects
let a = {a: 1, b: 2};
>>>>>>> REPLACE
|
AustinRochford/s3img-ipython-magic
|
b374221d8d0e902494066d666570c1a882c962bc
|
s3img_magic.py
|
python
|
mit
|
Add magic to save a Matplotlib figure to S3
|
from IPython.display import Image
import boto
def parse_s3_uri(uri):
if uri.startswith('s3://'):
uri = uri[5:]
return uri.split('/', 1)
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
conn = boto.connect_s3()
bucket = conn.get_bucket(bucket_name)
return bucket.get_key(key_name)
def s3img(uri):
key = get_s3_key(uri)
data = key.get_contents_as_string()
return Image(data=data)
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
|
from StringIO import StringIO
from IPython.core.magic import Magics, magics_class, line_magic
from IPython.display import Image
import boto
def parse_s3_uri(uri):
if uri.startswith('s3://'):
uri = uri[5:]
return uri.split('/', 1)
def get_s3_bucket(bucket_name):
conn = boto.connect_s3()
return conn.get_bucket(bucket_name)
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
bucket = get_s3_bucket(bucket_name)
return bucket.get_key(key_name)
def get_or_create_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
bucket = get_s3_bucket(bucket_name)
return bucket.new_key(key_name)
def s3img(uri):
key = get_s3_key(uri)
data = key.get_contents_as_string()
return Image(data=data)
@magics_class
class S3ImageSaver(Magics):
@line_magic
def s3img_save(self, line):
"""BEWARE: this magic will happily overwrite any S3 URI"""
fig_name, uri = line.split(' ', 1)
fig = self.shell.ev(fig_name)
tmp = StringIO()
fig.savefig(tmp)
key = get_or_create_s3_key(uri)
key.set_contents_from_string(tmp.getvalue())
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
ipython.register_magics(S3ImageSaver)
| 34
| 3
| 4
|
mixed
|
--- a/s3img_magic.py
+++ b/s3img_magic.py
@@ -1 +1,4 @@
+from StringIO import StringIO
+
+from IPython.core.magic import Magics, magics_class, line_magic
from IPython.display import Image
@@ -3,2 +6,3 @@
import boto
+
@@ -11,9 +15,20 @@
+def get_s3_bucket(bucket_name):
+ conn = boto.connect_s3()
+
+ return conn.get_bucket(bucket_name)
+
+
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
-
- conn = boto.connect_s3()
- bucket = conn.get_bucket(bucket_name)
+ bucket = get_s3_bucket(bucket_name)
return bucket.get_key(key_name)
+
+
+def get_or_create_s3_key(uri):
+ bucket_name, key_name = parse_s3_uri(uri)
+ bucket = get_s3_bucket(bucket_name)
+
+ return bucket.new_key(key_name)
@@ -27,3 +42,19 @@
+@magics_class
+class S3ImageSaver(Magics):
+ @line_magic
+ def s3img_save(self, line):
+ """BEWARE: this magic will happily overwrite any S3 URI"""
+ fig_name, uri = line.split(' ', 1)
+
+ fig = self.shell.ev(fig_name)
+ tmp = StringIO()
+ fig.savefig(tmp)
+
+ key = get_or_create_s3_key(uri)
+ key.set_contents_from_string(tmp.getvalue())
+
+
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
+ ipython.register_magics(S3ImageSaver)
|
--- a/s3img_magic.py
+++ b/s3img_magic.py
@@ ... @@
+from StringIO import StringIO
+
+from IPython.core.magic import Magics, magics_class, line_magic
from IPython.display import Image
@@ ... @@
import boto
+
@@ ... @@
+def get_s3_bucket(bucket_name):
+ conn = boto.connect_s3()
+
+ return conn.get_bucket(bucket_name)
+
+
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
-
- conn = boto.connect_s3()
- bucket = conn.get_bucket(bucket_name)
+ bucket = get_s3_bucket(bucket_name)
return bucket.get_key(key_name)
+
+
+def get_or_create_s3_key(uri):
+ bucket_name, key_name = parse_s3_uri(uri)
+ bucket = get_s3_bucket(bucket_name)
+
+ return bucket.new_key(key_name)
@@ ... @@
+@magics_class
+class S3ImageSaver(Magics):
+ @line_magic
+ def s3img_save(self, line):
+ """BEWARE: this magic will happily overwrite any S3 URI"""
+ fig_name, uri = line.split(' ', 1)
+
+ fig = self.shell.ev(fig_name)
+ tmp = StringIO()
+ fig.savefig(tmp)
+
+ key = get_or_create_s3_key(uri)
+ key.set_contents_from_string(tmp.getvalue())
+
+
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
+ ipython.register_magics(S3ImageSaver)
|
--- a/s3img_magic.py
+++ b/s3img_magic.py
@@ -1 +1,4 @@
ADD from StringIO import StringIO
ADD
ADD from IPython.core.magic import Magics, magics_class, line_magic
CON from IPython.display import Image
@@ -3,2 +6,3 @@
CON import boto
ADD
CON
@@ -11,9 +15,20 @@
CON
ADD def get_s3_bucket(bucket_name):
ADD conn = boto.connect_s3()
ADD
ADD return conn.get_bucket(bucket_name)
ADD
ADD
CON def get_s3_key(uri):
CON bucket_name, key_name = parse_s3_uri(uri)
DEL
DEL conn = boto.connect_s3()
DEL bucket = conn.get_bucket(bucket_name)
ADD bucket = get_s3_bucket(bucket_name)
CON
CON return bucket.get_key(key_name)
ADD
ADD
ADD def get_or_create_s3_key(uri):
ADD bucket_name, key_name = parse_s3_uri(uri)
ADD bucket = get_s3_bucket(bucket_name)
ADD
ADD return bucket.new_key(key_name)
CON
@@ -27,3 +42,19 @@
CON
ADD @magics_class
ADD class S3ImageSaver(Magics):
ADD @line_magic
ADD def s3img_save(self, line):
ADD """BEWARE: this magic will happily overwrite any S3 URI"""
ADD fig_name, uri = line.split(' ', 1)
ADD
ADD fig = self.shell.ev(fig_name)
ADD tmp = StringIO()
ADD fig.savefig(tmp)
ADD
ADD key = get_or_create_s3_key(uri)
ADD key.set_contents_from_string(tmp.getvalue())
ADD
ADD
CON def load_ipython_extension(ipython):
CON ipython.register_magic_function(s3img, 'line')
ADD ipython.register_magics(S3ImageSaver)
|
<<<<<<< SEARCH
from IPython.display import Image
import boto
def parse_s3_uri(uri):
=======
from StringIO import StringIO
from IPython.core.magic import Magics, magics_class, line_magic
from IPython.display import Image
import boto
def parse_s3_uri(uri):
>>>>>>> REPLACE
<<<<<<< SEARCH
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
conn = boto.connect_s3()
bucket = conn.get_bucket(bucket_name)
return bucket.get_key(key_name)
=======
def get_s3_bucket(bucket_name):
conn = boto.connect_s3()
return conn.get_bucket(bucket_name)
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
bucket = get_s3_bucket(bucket_name)
return bucket.get_key(key_name)
def get_or_create_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
bucket = get_s3_bucket(bucket_name)
return bucket.new_key(key_name)
>>>>>>> REPLACE
<<<<<<< SEARCH
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
=======
@magics_class
class S3ImageSaver(Magics):
@line_magic
def s3img_save(self, line):
"""BEWARE: this magic will happily overwrite any S3 URI"""
fig_name, uri = line.split(' ', 1)
fig = self.shell.ev(fig_name)
tmp = StringIO()
fig.savefig(tmp)
key = get_or_create_s3_key(uri)
key.set_contents_from_string(tmp.getvalue())
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
ipython.register_magics(S3ImageSaver)
>>>>>>> REPLACE
|
2017398956/picasso
|
97b5bc0257697856dbb144379b9e85346d4c2dfd
|
picasso-sample/src/main/java/com/squareup/picasso/sample/SampleActivity.java
|
java
|
apache-2.0
|
Simplify menu debugging logic a bit.
|
package com.squareup.picasso.sample;
import android.app.Activity;
import android.os.Bundle;
import android.os.StrictMode;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ListView;
import com.squareup.picasso.Picasso;
public class SampleActivity extends Activity {
private SampleAdapter adapter;
@Override protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
StrictMode.setThreadPolicy(
new StrictMode.ThreadPolicy.Builder().detectAll().penaltyLog().build());
adapter = new SampleAdapter(this);
ListView lv = new ListView(this);
lv.setAdapter(adapter);
setContentView(lv);
}
@Override public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == 0) {
item.setChecked(!item.isChecked());
Picasso.with(this).setDebugging(item.isChecked());
adapter.notifyDataSetChanged();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override public boolean onCreateOptionsMenu(Menu menu) {
MenuItem debugItem = menu.add(0, 0, 0, "Debugging");
debugItem.setCheckable(true);
debugItem.setChecked(Picasso.with(this).isDebugging());
return super.onCreateOptionsMenu(menu);
}
}
|
package com.squareup.picasso.sample;
import android.app.Activity;
import android.os.Bundle;
import android.os.StrictMode;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ListView;
import com.squareup.picasso.Picasso;
public class SampleActivity extends Activity {
private SampleAdapter adapter;
@Override protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
StrictMode.setThreadPolicy(
new StrictMode.ThreadPolicy.Builder().detectAll().penaltyLog().build());
adapter = new SampleAdapter(this);
ListView lv = new ListView(this);
lv.setAdapter(adapter);
setContentView(lv);
}
@Override public boolean onCreateOptionsMenu(Menu menu) {
menu.add("Debugging")
.setCheckable(true)
.setChecked(Picasso.with(this).isDebugging())
.setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
@Override public boolean onMenuItemClick(MenuItem item) {
item.setChecked(!item.isChecked());
Picasso.with(SampleActivity.this).setDebugging(item.isChecked());
adapter.notifyDataSetChanged();
return true;
}
});
return true;
}
}
| 12
| 15
| 1
|
mixed
|
--- a/picasso-sample/src/main/java/com/squareup/picasso/sample/SampleActivity.java
+++ b/picasso-sample/src/main/java/com/squareup/picasso/sample/SampleActivity.java
@@ -27,18 +27,15 @@
- @Override public boolean onOptionsItemSelected(MenuItem item) {
- if (item.getItemId() == 0) {
- item.setChecked(!item.isChecked());
-
- Picasso.with(this).setDebugging(item.isChecked());
- adapter.notifyDataSetChanged();
- return true;
- }
- return super.onOptionsItemSelected(item);
- }
-
@Override public boolean onCreateOptionsMenu(Menu menu) {
- MenuItem debugItem = menu.add(0, 0, 0, "Debugging");
- debugItem.setCheckable(true);
- debugItem.setChecked(Picasso.with(this).isDebugging());
- return super.onCreateOptionsMenu(menu);
+ menu.add("Debugging")
+ .setCheckable(true)
+ .setChecked(Picasso.with(this).isDebugging())
+ .setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
+ @Override public boolean onMenuItemClick(MenuItem item) {
+ item.setChecked(!item.isChecked());
+ Picasso.with(SampleActivity.this).setDebugging(item.isChecked());
+ adapter.notifyDataSetChanged();
+ return true;
+ }
+ });
+ return true;
}
|
--- a/picasso-sample/src/main/java/com/squareup/picasso/sample/SampleActivity.java
+++ b/picasso-sample/src/main/java/com/squareup/picasso/sample/SampleActivity.java
@@ ... @@
- @Override public boolean onOptionsItemSelected(MenuItem item) {
- if (item.getItemId() == 0) {
- item.setChecked(!item.isChecked());
-
- Picasso.with(this).setDebugging(item.isChecked());
- adapter.notifyDataSetChanged();
- return true;
- }
- return super.onOptionsItemSelected(item);
- }
-
@Override public boolean onCreateOptionsMenu(Menu menu) {
- MenuItem debugItem = menu.add(0, 0, 0, "Debugging");
- debugItem.setCheckable(true);
- debugItem.setChecked(Picasso.with(this).isDebugging());
- return super.onCreateOptionsMenu(menu);
+ menu.add("Debugging")
+ .setCheckable(true)
+ .setChecked(Picasso.with(this).isDebugging())
+ .setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
+ @Override public boolean onMenuItemClick(MenuItem item) {
+ item.setChecked(!item.isChecked());
+ Picasso.with(SampleActivity.this).setDebugging(item.isChecked());
+ adapter.notifyDataSetChanged();
+ return true;
+ }
+ });
+ return true;
}
|
--- a/picasso-sample/src/main/java/com/squareup/picasso/sample/SampleActivity.java
+++ b/picasso-sample/src/main/java/com/squareup/picasso/sample/SampleActivity.java
@@ -27,18 +27,15 @@
CON
DEL @Override public boolean onOptionsItemSelected(MenuItem item) {
DEL if (item.getItemId() == 0) {
DEL item.setChecked(!item.isChecked());
DEL
DEL Picasso.with(this).setDebugging(item.isChecked());
DEL adapter.notifyDataSetChanged();
DEL return true;
DEL }
DEL return super.onOptionsItemSelected(item);
DEL }
DEL
CON @Override public boolean onCreateOptionsMenu(Menu menu) {
DEL MenuItem debugItem = menu.add(0, 0, 0, "Debugging");
DEL debugItem.setCheckable(true);
DEL debugItem.setChecked(Picasso.with(this).isDebugging());
DEL return super.onCreateOptionsMenu(menu);
ADD menu.add("Debugging")
ADD .setCheckable(true)
ADD .setChecked(Picasso.with(this).isDebugging())
ADD .setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
ADD @Override public boolean onMenuItemClick(MenuItem item) {
ADD item.setChecked(!item.isChecked());
ADD Picasso.with(SampleActivity.this).setDebugging(item.isChecked());
ADD adapter.notifyDataSetChanged();
ADD return true;
ADD }
ADD });
ADD return true;
CON }
|
<<<<<<< SEARCH
}
@Override public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == 0) {
item.setChecked(!item.isChecked());
Picasso.with(this).setDebugging(item.isChecked());
adapter.notifyDataSetChanged();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override public boolean onCreateOptionsMenu(Menu menu) {
MenuItem debugItem = menu.add(0, 0, 0, "Debugging");
debugItem.setCheckable(true);
debugItem.setChecked(Picasso.with(this).isDebugging());
return super.onCreateOptionsMenu(menu);
}
}
=======
}
@Override public boolean onCreateOptionsMenu(Menu menu) {
menu.add("Debugging")
.setCheckable(true)
.setChecked(Picasso.with(this).isDebugging())
.setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
@Override public boolean onMenuItemClick(MenuItem item) {
item.setChecked(!item.isChecked());
Picasso.with(SampleActivity.this).setDebugging(item.isChecked());
adapter.notifyDataSetChanged();
return true;
}
});
return true;
}
}
>>>>>>> REPLACE
|
jeorme/OG-Platform
|
fb5c62d52bd62c29a826c5a80682f01df414f649
|
projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
|
java
|
apache-2.0
|
Handle no currency properly, as well as multiple currency
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.aggregation;
import java.util.Collection;
import java.util.Collections;
import com.opengamma.core.position.Position;
import com.opengamma.financial.security.FinancialSecurityUtils;
/**
* Function to classify positions by Currency.
*
*/
public class CurrencyAggregationFunction implements AggregationFunction<String> {
private static final String NAME = "Currency";
@Override
public String classifyPosition(Position position) {
try {
return FinancialSecurityUtils.getCurrency(position.getSecurity()).toString();
} catch (UnsupportedOperationException ex) {
return "No or multiple currencies";
}
}
public String getName() {
return NAME;
}
@Override
public Collection<String> getRequiredEntries() {
return Collections.emptyList();
}
}
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.aggregation;
import java.util.Collection;
import java.util.Collections;
import com.opengamma.core.position.Position;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.util.money.Currency;
/**
* Function to classify positions by Currency.
*
*/
public class CurrencyAggregationFunction implements AggregationFunction<String> {
private static final String NAME = "Currency";
private static final String NO_CURRENCY = "No or multiple currencies";
@Override
public String classifyPosition(Position position) {
try {
Currency currency = FinancialSecurityUtils.getCurrency(position.getSecurity());
if (currency == null) {
return NO_CURRENCY;
}
return currency.toString();
} catch (UnsupportedOperationException ex) {
return NO_CURRENCY;
}
}
public String getName() {
return NAME;
}
@Override
public Collection<String> getRequiredEntries() {
return Collections.emptyList();
}
}
| 8
| 2
| 3
|
mixed
|
--- a/projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
+++ b/projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
@@ -12,2 +12,3 @@
import com.opengamma.financial.security.FinancialSecurityUtils;
+import com.opengamma.util.money.Currency;
/**
@@ -19,2 +20,3 @@
private static final String NAME = "Currency";
+ private static final String NO_CURRENCY = "No or multiple currencies";
@@ -23,5 +25,9 @@
try {
- return FinancialSecurityUtils.getCurrency(position.getSecurity()).toString();
+ Currency currency = FinancialSecurityUtils.getCurrency(position.getSecurity());
+ if (currency == null) {
+ return NO_CURRENCY;
+ }
+ return currency.toString();
} catch (UnsupportedOperationException ex) {
- return "No or multiple currencies";
+ return NO_CURRENCY;
}
|
--- a/projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
+++ b/projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
@@ ... @@
import com.opengamma.financial.security.FinancialSecurityUtils;
+import com.opengamma.util.money.Currency;
/**
@@ ... @@
private static final String NAME = "Currency";
+ private static final String NO_CURRENCY = "No or multiple currencies";
@@ ... @@
try {
- return FinancialSecurityUtils.getCurrency(position.getSecurity()).toString();
+ Currency currency = FinancialSecurityUtils.getCurrency(position.getSecurity());
+ if (currency == null) {
+ return NO_CURRENCY;
+ }
+ return currency.toString();
} catch (UnsupportedOperationException ex) {
- return "No or multiple currencies";
+ return NO_CURRENCY;
}
|
--- a/projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
+++ b/projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
@@ -12,2 +12,3 @@
CON import com.opengamma.financial.security.FinancialSecurityUtils;
ADD import com.opengamma.util.money.Currency;
CON /**
@@ -19,2 +20,3 @@
CON private static final String NAME = "Currency";
ADD private static final String NO_CURRENCY = "No or multiple currencies";
CON
@@ -23,5 +25,9 @@
CON try {
DEL return FinancialSecurityUtils.getCurrency(position.getSecurity()).toString();
ADD Currency currency = FinancialSecurityUtils.getCurrency(position.getSecurity());
ADD if (currency == null) {
ADD return NO_CURRENCY;
ADD }
ADD return currency.toString();
CON } catch (UnsupportedOperationException ex) {
DEL return "No or multiple currencies";
ADD return NO_CURRENCY;
CON }
|
<<<<<<< SEARCH
import com.opengamma.core.position.Position;
import com.opengamma.financial.security.FinancialSecurityUtils;
/**
* Function to classify positions by Currency.
=======
import com.opengamma.core.position.Position;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.util.money.Currency;
/**
* Function to classify positions by Currency.
>>>>>>> REPLACE
<<<<<<< SEARCH
private static final String NAME = "Currency";
@Override
public String classifyPosition(Position position) {
try {
return FinancialSecurityUtils.getCurrency(position.getSecurity()).toString();
} catch (UnsupportedOperationException ex) {
return "No or multiple currencies";
}
}
=======
private static final String NAME = "Currency";
private static final String NO_CURRENCY = "No or multiple currencies";
@Override
public String classifyPosition(Position position) {
try {
Currency currency = FinancialSecurityUtils.getCurrency(position.getSecurity());
if (currency == null) {
return NO_CURRENCY;
}
return currency.toString();
} catch (UnsupportedOperationException ex) {
return NO_CURRENCY;
}
}
>>>>>>> REPLACE
|
pamods/planetary-annihilation-ui-mods
|
91ed82ae5a49c06f92ad40658c0c5a407ae43884
|
ui_mod_list.js
|
javascript
|
mit
|
Add reminder timer to the default mod list.
|
var rModsList = [];
/* start ui_mod_list */
var global_mod_list = [
];
var scene_mod_list = {'connect_to_game': [
],'game_over': [
],
'icon_atlas': [
],
'live_game': [
//In game timer
'../../mods/dTimer/dTimer.css',
'../../mods/dTimer/dTimer.js',
//Mex/Energy Count
'../../mods/dMexCount/dMexCount.css',
'../../mods/dMexCount/dMexCount.js',
//Better System view (show system view at all times)
'../../mods/dBetterSystemView/dBetterSystemView.css',
'../../mods/dBetterSystemView/dBetterSystemView.js',
],
'load_planet': [
],
'lobby': [
],
'matchmaking': [
],
'new_game': [
],
'server_browser': [
],
'settings': [
],
'special_icon_atlas': [
],
'start': [
],
'system_editor': [
],
'transit': [
]
}
/* end ui_mod_list */
|
var rModsList = [];
/* start ui_mod_list */
var global_mod_list = [
];
var scene_mod_list = {'connect_to_game': [
],'game_over': [
],
'icon_atlas': [
],
'live_game': [
//In game timer
'../../mods/dTimer/dTimer.css',
'../../mods/dTimer/dTimer.js',
//Mex/Energy Count
'../../mods/dMexCount/dMexCount.css',
'../../mods/dMexCount/dMexCount.js',
//Better System view (show system view at all times)
'../../mods/dBetterSystemView/dBetterSystemView.css',
'../../mods/dBetterSystemView/dBetterSystemView.js',
//Reminders
'../../mods/dReminderTimer/dReminderTimer.css',
'../../mods/dReminderTimer/dReminderTimer.js',
],
'load_planet': [
],
'lobby': [
],
'matchmaking': [
],
'new_game': [
],
'server_browser': [
],
'settings': [
],
'special_icon_atlas': [
],
'start': [
],
'system_editor': [
],
'transit': [
]
}
/* end ui_mod_list */
| 3
| 0
| 1
|
add_only
|
--- a/ui_mod_list.js
+++ b/ui_mod_list.js
@@ -28,2 +28,5 @@
+ //Reminders
+ '../../mods/dReminderTimer/dReminderTimer.css',
+ '../../mods/dReminderTimer/dReminderTimer.js',
],
|
--- a/ui_mod_list.js
+++ b/ui_mod_list.js
@@ ... @@
+ //Reminders
+ '../../mods/dReminderTimer/dReminderTimer.css',
+ '../../mods/dReminderTimer/dReminderTimer.js',
],
|
--- a/ui_mod_list.js
+++ b/ui_mod_list.js
@@ -28,2 +28,5 @@
CON
ADD //Reminders
ADD '../../mods/dReminderTimer/dReminderTimer.css',
ADD '../../mods/dReminderTimer/dReminderTimer.js',
CON ],
|
<<<<<<< SEARCH
'../../mods/dBetterSystemView/dBetterSystemView.js',
],
'load_planet': [
=======
'../../mods/dBetterSystemView/dBetterSystemView.js',
//Reminders
'../../mods/dReminderTimer/dReminderTimer.css',
'../../mods/dReminderTimer/dReminderTimer.js',
],
'load_planet': [
>>>>>>> REPLACE
|
dtolnay/cxx
|
fa66e2afa6f43034a564ded0f4ee8a6555f7160d
|
cmd/src/main.rs
|
rust
|
apache-2.0
|
Customize usage message of cmd
|
mod gen;
mod syntax;
use gen::include;
use std::io::{self, Write};
use std::path::PathBuf;
use structopt::StructOpt;
#[derive(StructOpt, Debug)]
#[structopt(name = "cxxbridge", author)]
struct Opt {
/// Input Rust source file containing #[cxx::bridge]
#[structopt(parse(from_os_str), required_unless = "header")]
input: Option<PathBuf>,
/// Emit header with declarations only
#[structopt(long)]
header: bool,
}
fn write(content: impl AsRef<[u8]>) {
let _ = io::stdout().lock().write_all(content.as_ref());
}
fn main() {
let opt = Opt::from_args();
match (opt.input, opt.header) {
(Some(input), true) => write(gen::do_generate_header(&input)),
(Some(input), false) => write(gen::do_generate_bridge(&input)),
(None, true) => write(include::HEADER),
(None, false) => unreachable!(), // enforced by required_unless
}
}
|
mod gen;
mod syntax;
use gen::include;
use std::io::{self, Write};
use std::path::PathBuf;
use structopt::StructOpt;
#[derive(StructOpt, Debug)]
#[structopt(
name = "cxxbridge",
author,
about = "https://github.com/dtolnay/cxx",
usage = "\
cxxbridge <input>.rs Emit .cc file for bridge to stdout
cxxbridge <input>.rs --header Emit .h file for bridge to stdout
cxxbridge --header Emit cxxbridge.h header to stdout",
help_message = "Print help information",
version_message = "Print version information"
)]
struct Opt {
/// Input Rust source file containing #[cxx::bridge]
#[structopt(parse(from_os_str), required_unless = "header")]
input: Option<PathBuf>,
/// Emit header with declarations only
#[structopt(long)]
header: bool,
}
fn write(content: impl AsRef<[u8]>) {
let _ = io::stdout().lock().write_all(content.as_ref());
}
fn main() {
let opt = Opt::from_args();
match (opt.input, opt.header) {
(Some(input), true) => write(gen::do_generate_header(&input)),
(Some(input), false) => write(gen::do_generate_bridge(&input)),
(None, true) => write(include::HEADER),
(None, false) => unreachable!(), // enforced by required_unless
}
}
| 11
| 1
| 1
|
mixed
|
--- a/cmd/src/main.rs
+++ b/cmd/src/main.rs
@@ -9,3 +9,13 @@
#[derive(StructOpt, Debug)]
-#[structopt(name = "cxxbridge", author)]
+#[structopt(
+ name = "cxxbridge",
+ author,
+ about = "https://github.com/dtolnay/cxx",
+ usage = "\
+ cxxbridge <input>.rs Emit .cc file for bridge to stdout
+ cxxbridge <input>.rs --header Emit .h file for bridge to stdout
+ cxxbridge --header Emit cxxbridge.h header to stdout",
+ help_message = "Print help information",
+ version_message = "Print version information"
+)]
struct Opt {
|
--- a/cmd/src/main.rs
+++ b/cmd/src/main.rs
@@ ... @@
#[derive(StructOpt, Debug)]
-#[structopt(name = "cxxbridge", author)]
+#[structopt(
+ name = "cxxbridge",
+ author,
+ about = "https://github.com/dtolnay/cxx",
+ usage = "\
+ cxxbridge <input>.rs Emit .cc file for bridge to stdout
+ cxxbridge <input>.rs --header Emit .h file for bridge to stdout
+ cxxbridge --header Emit cxxbridge.h header to stdout",
+ help_message = "Print help information",
+ version_message = "Print version information"
+)]
struct Opt {
|
--- a/cmd/src/main.rs
+++ b/cmd/src/main.rs
@@ -9,3 +9,13 @@
CON #[derive(StructOpt, Debug)]
DEL #[structopt(name = "cxxbridge", author)]
ADD #[structopt(
ADD name = "cxxbridge",
ADD author,
ADD about = "https://github.com/dtolnay/cxx",
ADD usage = "\
ADD cxxbridge <input>.rs Emit .cc file for bridge to stdout
ADD cxxbridge <input>.rs --header Emit .h file for bridge to stdout
ADD cxxbridge --header Emit cxxbridge.h header to stdout",
ADD help_message = "Print help information",
ADD version_message = "Print version information"
ADD )]
CON struct Opt {
|
<<<<<<< SEARCH
#[derive(StructOpt, Debug)]
#[structopt(name = "cxxbridge", author)]
struct Opt {
/// Input Rust source file containing #[cxx::bridge]
=======
#[derive(StructOpt, Debug)]
#[structopt(
name = "cxxbridge",
author,
about = "https://github.com/dtolnay/cxx",
usage = "\
cxxbridge <input>.rs Emit .cc file for bridge to stdout
cxxbridge <input>.rs --header Emit .h file for bridge to stdout
cxxbridge --header Emit cxxbridge.h header to stdout",
help_message = "Print help information",
version_message = "Print version information"
)]
struct Opt {
/// Input Rust source file containing #[cxx::bridge]
>>>>>>> REPLACE
|
attm2x/m2x-tessel
|
b68e2d2aea2341ef5ef7138e53dea65a9489abe8
|
examples/update-single.js
|
javascript
|
mit
|
Fix method name typo in examples
|
#!/usr/bin/env node
//
// See https://github.com/attm2x/m2x-nodejs/blob/master/README.md#example-usage
// for instructions
//
var config = require("./config");
var M2X = require("m2x-tessel");
var m2xClient = new M2X(config.api_key);
var stream = "temperature";
var stream_params = {
"unit": {
"label": "celsius",
"symbol": "C"
},
"type": "numeric"
};
// Create the stream if it doesn't exist already
m2xClient.devices.updateStream(config.device, stream, stream_params, function(response) {
if (response.isError()) {
console.log("Cannot create stream:", response);
return;
}
var temperature = 24;
setInterval(function () {
console.log("I'm updating stream values! (Press CTRL + C to stop)");
// Update the latest stream value to our new value
m2xClient.devices.updateStreamValue(config.device, stream, {"value": new_value}, function(result) {
if (result.isError()) {
console.log(result.error());
}
});
new_value += 1;
}, 5000);
});
|
#!/usr/bin/env node
//
// See https://github.com/attm2x/m2x-nodejs/blob/master/README.md#example-usage
// for instructions
//
var config = require("./config");
var M2X = require("m2x-tessel");
var m2xClient = new M2X(config.api_key);
var stream = "temperature";
var stream_params = {
"unit": {
"label": "celsius",
"symbol": "C"
},
"type": "numeric"
};
// Create the stream if it doesn't exist already
m2xClient.devices.updateStream(config.device, stream, stream_params, function(response) {
if (response.isError()) {
console.log("Cannot create stream:", response);
return;
}
var temperature = 24;
setInterval(function () {
console.log("I'm updating stream values! (Press CTRL + C to stop)");
// Update the latest stream value to our new value
m2xClient.devices.setStreamValue(config.device, stream, {"value": new_value}, function(result) {
if (result.isError()) {
console.log(result.error());
}
});
new_value += 1;
}, 5000);
});
| 1
| 1
| 1
|
mixed
|
--- a/examples/update-single.js
+++ b/examples/update-single.js
@@ -31,3 +31,3 @@
// Update the latest stream value to our new value
- m2xClient.devices.updateStreamValue(config.device, stream, {"value": new_value}, function(result) {
+ m2xClient.devices.setStreamValue(config.device, stream, {"value": new_value}, function(result) {
if (result.isError()) {
|
--- a/examples/update-single.js
+++ b/examples/update-single.js
@@ ... @@
// Update the latest stream value to our new value
- m2xClient.devices.updateStreamValue(config.device, stream, {"value": new_value}, function(result) {
+ m2xClient.devices.setStreamValue(config.device, stream, {"value": new_value}, function(result) {
if (result.isError()) {
|
--- a/examples/update-single.js
+++ b/examples/update-single.js
@@ -31,3 +31,3 @@
CON // Update the latest stream value to our new value
DEL m2xClient.devices.updateStreamValue(config.device, stream, {"value": new_value}, function(result) {
ADD m2xClient.devices.setStreamValue(config.device, stream, {"value": new_value}, function(result) {
CON if (result.isError()) {
|
<<<<<<< SEARCH
// Update the latest stream value to our new value
m2xClient.devices.updateStreamValue(config.device, stream, {"value": new_value}, function(result) {
if (result.isError()) {
console.log(result.error());
=======
// Update the latest stream value to our new value
m2xClient.devices.setStreamValue(config.device, stream, {"value": new_value}, function(result) {
if (result.isError()) {
console.log(result.error());
>>>>>>> REPLACE
|
open2c/cooltools
|
ad6bb5b787b4b959ff24c71122fc6f4d1a7e7ff9
|
cooltools/cli/__init__.py
|
python
|
mit
|
Add top-level cli debugging and verbosity options
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import click
from .. import __version__
# Monkey patch
click.core._verify_python3_env = lambda: None
CONTEXT_SETTINGS = {
'help_option_names': ['-h', '--help'],
}
@click.version_option(version=__version__)
@click.group(context_settings=CONTEXT_SETTINGS)
def cli():
pass
from . import (
dump_cworld,
diamond_insulation,
compute_expected,
compute_saddle,
call_dots,
)
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import click
import sys
from .. import __version__
# Monkey patch
click.core._verify_python3_env = lambda: None
CONTEXT_SETTINGS = {
'help_option_names': ['-h', '--help'],
}
@click.version_option(version=__version__)
@click.group(context_settings=CONTEXT_SETTINGS)
@click.option(
'--debug/--no-debug',
help="Verbose logging",
default=False)
@click.option(
'-pm', '--post-mortem',
help="Post mortem debugging",
is_flag=True,
default=False)
def cli(debug, post_mortem):
"""
Type -h or --help after any subcommand for more information.
"""
if debug:
pass
#logger.setLevel(logging.DEBUG)
if post_mortem:
import traceback
try:
import ipdb as pdb
except ImportError:
import pdb
def _excepthook(exc_type, value, tb):
traceback.print_exception(exc_type, value, tb)
print()
pdb.pm()
sys.excepthook = _excepthook
from . import (
dump_cworld,
diamond_insulation,
compute_expected,
compute_saddle,
call_dots,
)
| 30
| 2
| 2
|
mixed
|
--- a/cooltools/cli/__init__.py
+++ b/cooltools/cli/__init__.py
@@ -3,2 +3,3 @@
import click
+import sys
from .. import __version__
@@ -17,4 +18,31 @@
@click.group(context_settings=CONTEXT_SETTINGS)
-def cli():
- pass
[email protected](
+ '--debug/--no-debug',
+ help="Verbose logging",
+ default=False)
[email protected](
+ '-pm', '--post-mortem',
+ help="Post mortem debugging",
+ is_flag=True,
+ default=False)
+def cli(debug, post_mortem):
+ """
+ Type -h or --help after any subcommand for more information.
+
+ """
+ if debug:
+ pass
+ #logger.setLevel(logging.DEBUG)
+
+ if post_mortem:
+ import traceback
+ try:
+ import ipdb as pdb
+ except ImportError:
+ import pdb
+ def _excepthook(exc_type, value, tb):
+ traceback.print_exception(exc_type, value, tb)
+ print()
+ pdb.pm()
+ sys.excepthook = _excepthook
|
--- a/cooltools/cli/__init__.py
+++ b/cooltools/cli/__init__.py
@@ ... @@
import click
+import sys
from .. import __version__
@@ ... @@
@click.group(context_settings=CONTEXT_SETTINGS)
-def cli():
- pass
[email protected](
+ '--debug/--no-debug',
+ help="Verbose logging",
+ default=False)
[email protected](
+ '-pm', '--post-mortem',
+ help="Post mortem debugging",
+ is_flag=True,
+ default=False)
+def cli(debug, post_mortem):
+ """
+ Type -h or --help after any subcommand for more information.
+
+ """
+ if debug:
+ pass
+ #logger.setLevel(logging.DEBUG)
+
+ if post_mortem:
+ import traceback
+ try:
+ import ipdb as pdb
+ except ImportError:
+ import pdb
+ def _excepthook(exc_type, value, tb):
+ traceback.print_exception(exc_type, value, tb)
+ print()
+ pdb.pm()
+ sys.excepthook = _excepthook
|
--- a/cooltools/cli/__init__.py
+++ b/cooltools/cli/__init__.py
@@ -3,2 +3,3 @@
CON import click
ADD import sys
CON from .. import __version__
@@ -17,4 +18,31 @@
CON @click.group(context_settings=CONTEXT_SETTINGS)
DEL def cli():
DEL pass
ADD @click.option(
ADD '--debug/--no-debug',
ADD help="Verbose logging",
ADD default=False)
ADD @click.option(
ADD '-pm', '--post-mortem',
ADD help="Post mortem debugging",
ADD is_flag=True,
ADD default=False)
ADD def cli(debug, post_mortem):
ADD """
ADD Type -h or --help after any subcommand for more information.
ADD
ADD """
ADD if debug:
ADD pass
ADD #logger.setLevel(logging.DEBUG)
ADD
ADD if post_mortem:
ADD import traceback
ADD try:
ADD import ipdb as pdb
ADD except ImportError:
ADD import pdb
ADD def _excepthook(exc_type, value, tb):
ADD traceback.print_exception(exc_type, value, tb)
ADD print()
ADD pdb.pm()
ADD sys.excepthook = _excepthook
CON
|
<<<<<<< SEARCH
from __future__ import division, print_function
import click
from .. import __version__
=======
from __future__ import division, print_function
import click
import sys
from .. import __version__
>>>>>>> REPLACE
<<<<<<< SEARCH
@click.version_option(version=__version__)
@click.group(context_settings=CONTEXT_SETTINGS)
def cli():
pass
=======
@click.version_option(version=__version__)
@click.group(context_settings=CONTEXT_SETTINGS)
@click.option(
'--debug/--no-debug',
help="Verbose logging",
default=False)
@click.option(
'-pm', '--post-mortem',
help="Post mortem debugging",
is_flag=True,
default=False)
def cli(debug, post_mortem):
"""
Type -h or --help after any subcommand for more information.
"""
if debug:
pass
#logger.setLevel(logging.DEBUG)
if post_mortem:
import traceback
try:
import ipdb as pdb
except ImportError:
import pdb
def _excepthook(exc_type, value, tb):
traceback.print_exception(exc_type, value, tb)
print()
pdb.pm()
sys.excepthook = _excepthook
>>>>>>> REPLACE
|
imp/requests-rs
|
4fd200e94abd33a57ac820d29b84df86b6757803
|
src/lib.rs
|
rust
|
mit
|
Add very basic crate doc
|
extern crate hyper;
extern crate json;
mod request;
mod response;
pub use request::Request;
pub use response::Response;
pub use response::Codes;
pub type Result = hyper::Result<Response>;
pub type Error = hyper::error::Error;
pub fn get(url: &str) -> Result {
Request::default().get(url)
}
pub fn post(url: &str) -> Result {
Request::default().post(url)
}
pub fn put(url: &str) -> Result {
Request::default().put(url)
}
pub fn head(url: &str) -> Result {
Request::default().head(url)
}
pub fn delete(url: &str) -> Result {
Request::default().delete(url)
}
|
//! requests - HTTP client library with simple API.\
//! If you have used Python requests module you will find the API familiar.
//!
//! # Quick Start
//!
//! ```rust
//! extern crate hyper;
//! extern crate requests;
//! let response = requests::get("http://httpbin.org/get").unwrap();
//! assert_eq!(response.url(), "http://httpbin.org/get");
//! assert_eq!(response.reason(), "OK");
//! assert_eq!(response.status_code(), hyper::Ok);
//! let data = response.json().unwrap();
//! assert_eq!(data["url"], "http://httpbin.org/get");
//! assert_eq!(data["headers"]["Host"], "httpbin.org");
//! assert_eq!(data["headers"]["User-Agent"],
//! concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
//! ```
extern crate hyper;
extern crate json;
mod request;
mod response;
pub use request::Request;
pub use response::Response;
pub use response::Codes;
pub type Result = hyper::Result<Response>;
pub type Error = hyper::error::Error;
pub fn get(url: &str) -> Result {
Request::default().get(url)
}
pub fn post(url: &str) -> Result {
Request::default().post(url)
}
pub fn put(url: &str) -> Result {
Request::default().put(url)
}
pub fn head(url: &str) -> Result {
Request::default().head(url)
}
pub fn delete(url: &str) -> Result {
Request::default().delete(url)
}
| 19
| 0
| 1
|
add_only
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,20 @@
+//! requests - HTTP client library with simple API.\
+//! If you have used Python requests module you will find the API familiar.
+//!
+//! # Quick Start
+//!
+//! ```rust
+//! extern crate hyper;
+//! extern crate requests;
+//! let response = requests::get("http://httpbin.org/get").unwrap();
+//! assert_eq!(response.url(), "http://httpbin.org/get");
+//! assert_eq!(response.reason(), "OK");
+//! assert_eq!(response.status_code(), hyper::Ok);
+//! let data = response.json().unwrap();
+//! assert_eq!(data["url"], "http://httpbin.org/get");
+//! assert_eq!(data["headers"]["Host"], "httpbin.org");
+//! assert_eq!(data["headers"]["User-Agent"],
+//! concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
+//! ```
+
extern crate hyper;
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ ... @@
+//! requests - HTTP client library with simple API.\
+//! If you have used Python requests module you will find the API familiar.
+//!
+//! # Quick Start
+//!
+//! ```rust
+//! extern crate hyper;
+//! extern crate requests;
+//! let response = requests::get("http://httpbin.org/get").unwrap();
+//! assert_eq!(response.url(), "http://httpbin.org/get");
+//! assert_eq!(response.reason(), "OK");
+//! assert_eq!(response.status_code(), hyper::Ok);
+//! let data = response.json().unwrap();
+//! assert_eq!(data["url"], "http://httpbin.org/get");
+//! assert_eq!(data["headers"]["Host"], "httpbin.org");
+//! assert_eq!(data["headers"]["User-Agent"],
+//! concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
+//! ```
+
extern crate hyper;
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,20 @@
ADD //! requests - HTTP client library with simple API.\
ADD //! If you have used Python requests module you will find the API familiar.
ADD //!
ADD //! # Quick Start
ADD //!
ADD //! ```rust
ADD //! extern crate hyper;
ADD //! extern crate requests;
ADD //! let response = requests::get("http://httpbin.org/get").unwrap();
ADD //! assert_eq!(response.url(), "http://httpbin.org/get");
ADD //! assert_eq!(response.reason(), "OK");
ADD //! assert_eq!(response.status_code(), hyper::Ok);
ADD //! let data = response.json().unwrap();
ADD //! assert_eq!(data["url"], "http://httpbin.org/get");
ADD //! assert_eq!(data["headers"]["Host"], "httpbin.org");
ADD //! assert_eq!(data["headers"]["User-Agent"],
ADD //! concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
ADD //! ```
ADD
CON extern crate hyper;
|
<<<<<<< SEARCH
extern crate hyper;
extern crate json;
=======
//! requests - HTTP client library with simple API.\
//! If you have used Python requests module you will find the API familiar.
//!
//! # Quick Start
//!
//! ```rust
//! extern crate hyper;
//! extern crate requests;
//! let response = requests::get("http://httpbin.org/get").unwrap();
//! assert_eq!(response.url(), "http://httpbin.org/get");
//! assert_eq!(response.reason(), "OK");
//! assert_eq!(response.status_code(), hyper::Ok);
//! let data = response.json().unwrap();
//! assert_eq!(data["url"], "http://httpbin.org/get");
//! assert_eq!(data["headers"]["Host"], "httpbin.org");
//! assert_eq!(data["headers"]["User-Agent"],
//! concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
//! ```
extern crate hyper;
extern crate json;
>>>>>>> REPLACE
|
caromimo/textreader
|
7b9080ddb8569376454055668b1923dc044fb9b4
|
src/app.js
|
javascript
|
mit
|
Refactor to use spawn and more events.
|
var tmp = require('tmp');
var express = require('express');
var app = express();
app.use(express.bodyParser());
app.get('/vocalization/fr/homme', function (request, response) {
response.header('Content-Type', 'audio/mpeg');
response.header('Content-Disposition', 'inline; filename=test.mp3');
var exec = require('child_process').exec;
var text = request.query.texte;
tmp.tmpName(function _tempNameGenerated(err, tempfile) {
if (err) throw err;
// Below espeak takes text and transform it to wav
// using the mbrola voice (prefixed with mb-) then
// we use lame transforms the raw audio to mp3.
var command = 'espeak -v mb-fr1 -w ' + tempfile + ' -a 15 -p 50 -s 130 "' + text + '" | lame -r -m m -b 24 -s 16 ' + tempfile + ' ' + tempfile;
exec(command, function (error, stdout, stderr) {
response.sendfile(tempfile);
});
});
});
module.exports = app;
|
var tmp = require('tmp');
var express = require('express');
var app = express();
app.use(express.bodyParser());
app.get('/vocalization/fr/homme', function (request, response) {
response.header('Content-Type', 'audio/mpeg');
response.header('Content-Disposition', 'inline; filename=test.mp3');
var spawn = require('child_process').spawn;
tmp.tmpName(function _tempNameGenerated(err, espeakTmpfile) {
if (err) throw err;
var espeak = spawn('espeak', ['-vmb-fr1', '-w' + espeakTmpfile , '-s130', request.query.texte]);
espeak.on('exit', function(exitCode){
tmp.tmpName(function _tempNameGenerated(err, lameTmpfile) {
if (err) throw err;
// volume normalization with fast replaygain is used by default.
var options = ['-r', '-mm', '--silent', '-b24', '-s16', espeakTmpfile, lameTmpfile];
var lame = spawn('lame', options);
lame.on('exit', function(exitCode){
response.sendfile(lameTmpfile);
});
lame.stderr.on('data', function(data){
console.log("Lame error: " + data);
});
});
});
espeak.stderr.on('data', function(data){
console.log("Espeak error: " + data);
});
});
});
module.exports = app;
| 26
| 10
| 1
|
mixed
|
--- a/src/app.js
+++ b/src/app.js
@@ -8,16 +8,32 @@
response.header('Content-Disposition', 'inline; filename=test.mp3');
- var exec = require('child_process').exec;
- var text = request.query.texte;
+ var spawn = require('child_process').spawn;
+ tmp.tmpName(function _tempNameGenerated(err, espeakTmpfile) {
+ if (err) throw err;
+ var espeak = spawn('espeak', ['-vmb-fr1', '-w' + espeakTmpfile , '-s130', request.query.texte]);
- tmp.tmpName(function _tempNameGenerated(err, tempfile) {
- if (err) throw err;
- // Below espeak takes text and transform it to wav
- // using the mbrola voice (prefixed with mb-) then
- // we use lame transforms the raw audio to mp3.
- var command = 'espeak -v mb-fr1 -w ' + tempfile + ' -a 15 -p 50 -s 130 "' + text + '" | lame -r -m m -b 24 -s 16 ' + tempfile + ' ' + tempfile;
- exec(command, function (error, stdout, stderr) {
- response.sendfile(tempfile);
+ espeak.on('exit', function(exitCode){
+ tmp.tmpName(function _tempNameGenerated(err, lameTmpfile) {
+ if (err) throw err;
+ // volume normalization with fast replaygain is used by default.
+ var options = ['-r', '-mm', '--silent', '-b24', '-s16', espeakTmpfile, lameTmpfile];
+ var lame = spawn('lame', options);
+
+ lame.on('exit', function(exitCode){
+ response.sendfile(lameTmpfile);
+ });
+
+ lame.stderr.on('data', function(data){
+ console.log("Lame error: " + data);
+ });
+
+ });
});
+
+ espeak.stderr.on('data', function(data){
+ console.log("Espeak error: " + data);
+ });
+
});
+
});
|
--- a/src/app.js
+++ b/src/app.js
@@ ... @@
response.header('Content-Disposition', 'inline; filename=test.mp3');
- var exec = require('child_process').exec;
- var text = request.query.texte;
+ var spawn = require('child_process').spawn;
+ tmp.tmpName(function _tempNameGenerated(err, espeakTmpfile) {
+ if (err) throw err;
+ var espeak = spawn('espeak', ['-vmb-fr1', '-w' + espeakTmpfile , '-s130', request.query.texte]);
- tmp.tmpName(function _tempNameGenerated(err, tempfile) {
- if (err) throw err;
- // Below espeak takes text and transform it to wav
- // using the mbrola voice (prefixed with mb-) then
- // we use lame transforms the raw audio to mp3.
- var command = 'espeak -v mb-fr1 -w ' + tempfile + ' -a 15 -p 50 -s 130 "' + text + '" | lame -r -m m -b 24 -s 16 ' + tempfile + ' ' + tempfile;
- exec(command, function (error, stdout, stderr) {
- response.sendfile(tempfile);
+ espeak.on('exit', function(exitCode){
+ tmp.tmpName(function _tempNameGenerated(err, lameTmpfile) {
+ if (err) throw err;
+ // volume normalization with fast replaygain is used by default.
+ var options = ['-r', '-mm', '--silent', '-b24', '-s16', espeakTmpfile, lameTmpfile];
+ var lame = spawn('lame', options);
+
+ lame.on('exit', function(exitCode){
+ response.sendfile(lameTmpfile);
+ });
+
+ lame.stderr.on('data', function(data){
+ console.log("Lame error: " + data);
+ });
+
+ });
});
+
+ espeak.stderr.on('data', function(data){
+ console.log("Espeak error: " + data);
+ });
+
});
+
});
|
--- a/src/app.js
+++ b/src/app.js
@@ -8,16 +8,32 @@
CON response.header('Content-Disposition', 'inline; filename=test.mp3');
DEL var exec = require('child_process').exec;
DEL var text = request.query.texte;
ADD var spawn = require('child_process').spawn;
CON
ADD tmp.tmpName(function _tempNameGenerated(err, espeakTmpfile) {
ADD if (err) throw err;
ADD var espeak = spawn('espeak', ['-vmb-fr1', '-w' + espeakTmpfile , '-s130', request.query.texte]);
CON
DEL tmp.tmpName(function _tempNameGenerated(err, tempfile) {
DEL if (err) throw err;
DEL // Below espeak takes text and transform it to wav
DEL // using the mbrola voice (prefixed with mb-) then
DEL // we use lame transforms the raw audio to mp3.
DEL var command = 'espeak -v mb-fr1 -w ' + tempfile + ' -a 15 -p 50 -s 130 "' + text + '" | lame -r -m m -b 24 -s 16 ' + tempfile + ' ' + tempfile;
DEL exec(command, function (error, stdout, stderr) {
DEL response.sendfile(tempfile);
ADD espeak.on('exit', function(exitCode){
ADD tmp.tmpName(function _tempNameGenerated(err, lameTmpfile) {
ADD if (err) throw err;
ADD // volume normalization with fast replaygain is used by default.
ADD var options = ['-r', '-mm', '--silent', '-b24', '-s16', espeakTmpfile, lameTmpfile];
ADD var lame = spawn('lame', options);
ADD
ADD lame.on('exit', function(exitCode){
ADD response.sendfile(lameTmpfile);
ADD });
ADD
ADD lame.stderr.on('data', function(data){
ADD console.log("Lame error: " + data);
ADD });
ADD
ADD });
CON });
ADD
ADD espeak.stderr.on('data', function(data){
ADD console.log("Espeak error: " + data);
ADD });
ADD
CON });
ADD
CON });
|
<<<<<<< SEARCH
response.header('Content-Type', 'audio/mpeg');
response.header('Content-Disposition', 'inline; filename=test.mp3');
var exec = require('child_process').exec;
var text = request.query.texte;
tmp.tmpName(function _tempNameGenerated(err, tempfile) {
if (err) throw err;
// Below espeak takes text and transform it to wav
// using the mbrola voice (prefixed with mb-) then
// we use lame transforms the raw audio to mp3.
var command = 'espeak -v mb-fr1 -w ' + tempfile + ' -a 15 -p 50 -s 130 "' + text + '" | lame -r -m m -b 24 -s 16 ' + tempfile + ' ' + tempfile;
exec(command, function (error, stdout, stderr) {
response.sendfile(tempfile);
});
});
});
=======
response.header('Content-Type', 'audio/mpeg');
response.header('Content-Disposition', 'inline; filename=test.mp3');
var spawn = require('child_process').spawn;
tmp.tmpName(function _tempNameGenerated(err, espeakTmpfile) {
if (err) throw err;
var espeak = spawn('espeak', ['-vmb-fr1', '-w' + espeakTmpfile , '-s130', request.query.texte]);
espeak.on('exit', function(exitCode){
tmp.tmpName(function _tempNameGenerated(err, lameTmpfile) {
if (err) throw err;
// volume normalization with fast replaygain is used by default.
var options = ['-r', '-mm', '--silent', '-b24', '-s16', espeakTmpfile, lameTmpfile];
var lame = spawn('lame', options);
lame.on('exit', function(exitCode){
response.sendfile(lameTmpfile);
});
lame.stderr.on('data', function(data){
console.log("Lame error: " + data);
});
});
});
espeak.stderr.on('data', function(data){
console.log("Espeak error: " + data);
});
});
});
>>>>>>> REPLACE
|
kamatama41/embulk-test-helpers
|
63df83e0ca712b939651fe1c686e6874fef383ba
|
build.gradle.kts
|
kotlin
|
mit
|
Add a limitation of memory usage for test task.
|
import com.github.kamatama41.gradle.gitrelease.GitReleaseExtension
buildscript {
repositories {
jcenter()
maven { setUrl("http://kamatama41.github.com/maven-repository/repository") }
}
dependencies {
classpath("com.github.kamatama41:gradle-git-release-plugin:0.2.0")
}
}
plugins {
idea
kotlin("jvm") version "1.2.31"
}
apply {
plugin("com.github.kamatama41.git-release")
}
repositories {
jcenter()
}
configure<JavaPluginConvention> {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
dependencies {
compile(kotlin("stdlib"))
compile("org.embulk:embulk-standards:0.9.7")
compile("org.embulk:embulk-test:0.9.7")
testCompile("junit:junit:4.12")
}
configure<GitReleaseExtension> {
groupId = "com.github.kamatama41"
artifactId = "embulk-test-helpers"
repoUri = "[email protected]:kamatama41/maven-repository.git"
repoDir = file("${System.getProperty("user.home")}/gh-maven-repository")
}
|
import com.github.kamatama41.gradle.gitrelease.GitReleaseExtension
buildscript {
repositories {
jcenter()
maven { setUrl("http://kamatama41.github.com/maven-repository/repository") }
}
dependencies {
classpath("com.github.kamatama41:gradle-git-release-plugin:0.2.0")
}
}
plugins {
idea
kotlin("jvm") version "1.2.31"
}
apply {
plugin("com.github.kamatama41.git-release")
}
repositories {
jcenter()
}
configure<JavaPluginConvention> {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
dependencies {
compile(kotlin("stdlib"))
compile("org.embulk:embulk-standards:0.9.7")
compile("org.embulk:embulk-test:0.9.7")
testCompile("junit:junit:4.12")
}
configure<GitReleaseExtension> {
groupId = "com.github.kamatama41"
artifactId = "embulk-test-helpers"
repoUri = "[email protected]:kamatama41/maven-repository.git"
repoDir = file("${System.getProperty("user.home")}/gh-maven-repository")
}
tasks {
named<Test>("test") {
// Not to exceed the limit of CircleCI (4GB)
maxHeapSize = "3g"
}
}
| 7
| 0
| 1
|
add_only
|
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -43 +43,8 @@
}
+
+tasks {
+ named<Test>("test") {
+ // Not to exceed the limit of CircleCI (4GB)
+ maxHeapSize = "3g"
+ }
+}
|
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ ... @@
}
+
+tasks {
+ named<Test>("test") {
+ // Not to exceed the limit of CircleCI (4GB)
+ maxHeapSize = "3g"
+ }
+}
|
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -43 +43,8 @@
CON }
ADD
ADD tasks {
ADD named<Test>("test") {
ADD // Not to exceed the limit of CircleCI (4GB)
ADD maxHeapSize = "3g"
ADD }
ADD }
|
<<<<<<< SEARCH
repoDir = file("${System.getProperty("user.home")}/gh-maven-repository")
}
=======
repoDir = file("${System.getProperty("user.home")}/gh-maven-repository")
}
tasks {
named<Test>("test") {
// Not to exceed the limit of CircleCI (4GB)
maxHeapSize = "3g"
}
}
>>>>>>> REPLACE
|
ognjen-petrovic/js-dxf
|
cecfcff0977bb7d1ee7748e73ddfe2345c8f5736
|
src/Layer.js
|
javascript
|
mit
|
Add possibility to set true color for layer
|
class Layer
{
constructor(name, colorNumber, lineTypeName)
{
this.name = name;
this.colorNumber = colorNumber;
this.lineTypeName = lineTypeName;
this.shapes = [];
}
toDxfString()
{
let s = '0\nLAYER\n';
s += '70\n64\n';
s += `2\n${this.name}\n`;
s += `62\n${this.colorNumber}\n`;
s += `6\n${this.lineTypeName}\n`;
return s;
}
addShape(shape)
{
this.shapes.push(shape);
shape.layer = this;
}
getShapes()
{
return this.shapes;
}
shapesToDxf()
{
let s = '';
for (let i = 0; i < this.shapes.length; ++i)
{
s += this.shapes[i].toDxfString();
}
return s;
}
}
module.exports = Layer;
|
class Layer
{
constructor(name, colorNumber, lineTypeName)
{
this.name = name;
this.colorNumber = colorNumber;
this.lineTypeName = lineTypeName;
this.shapes = [];
this.trueColor = -1;
}
toDxfString()
{
let s = '0\nLAYER\n';
s += '70\n64\n';
s += `2\n${this.name}\n`;
if (this.trueColor !== -1)
{
s += `420\n${this.trueColor}\n`
}
else
{
s += `62\n${this.colorNumber}\n`;
}
s += `6\n${this.lineTypeName}\n`;
return s;
}
setTrueColor(color)
{
this.trueColor = color;
}
addShape(shape)
{
this.shapes.push(shape);
shape.layer = this;
}
getShapes()
{
return this.shapes;
}
shapesToDxf()
{
let s = '';
for (let i = 0; i < this.shapes.length; ++i)
{
s += this.shapes[i].toDxfString();
}
return s;
}
}
module.exports = Layer;
| 14
| 1
| 2
|
mixed
|
--- a/src/Layer.js
+++ b/src/Layer.js
@@ -8,2 +8,3 @@
this.shapes = [];
+ this.trueColor = -1;
}
@@ -15,5 +16,17 @@
s += `2\n${this.name}\n`;
- s += `62\n${this.colorNumber}\n`;
+ if (this.trueColor !== -1)
+ {
+ s += `420\n${this.trueColor}\n`
+ }
+ else
+ {
+ s += `62\n${this.colorNumber}\n`;
+ }
s += `6\n${this.lineTypeName}\n`;
return s;
+ }
+
+ setTrueColor(color)
+ {
+ this.trueColor = color;
}
|
--- a/src/Layer.js
+++ b/src/Layer.js
@@ ... @@
this.shapes = [];
+ this.trueColor = -1;
}
@@ ... @@
s += `2\n${this.name}\n`;
- s += `62\n${this.colorNumber}\n`;
+ if (this.trueColor !== -1)
+ {
+ s += `420\n${this.trueColor}\n`
+ }
+ else
+ {
+ s += `62\n${this.colorNumber}\n`;
+ }
s += `6\n${this.lineTypeName}\n`;
return s;
+ }
+
+ setTrueColor(color)
+ {
+ this.trueColor = color;
}
|
--- a/src/Layer.js
+++ b/src/Layer.js
@@ -8,2 +8,3 @@
CON this.shapes = [];
ADD this.trueColor = -1;
CON }
@@ -15,5 +16,17 @@
CON s += `2\n${this.name}\n`;
DEL s += `62\n${this.colorNumber}\n`;
ADD if (this.trueColor !== -1)
ADD {
ADD s += `420\n${this.trueColor}\n`
ADD }
ADD else
ADD {
ADD s += `62\n${this.colorNumber}\n`;
ADD }
CON s += `6\n${this.lineTypeName}\n`;
CON return s;
ADD }
ADD
ADD setTrueColor(color)
ADD {
ADD this.trueColor = color;
CON }
|
<<<<<<< SEARCH
this.lineTypeName = lineTypeName;
this.shapes = [];
}
=======
this.lineTypeName = lineTypeName;
this.shapes = [];
this.trueColor = -1;
}
>>>>>>> REPLACE
<<<<<<< SEARCH
s += '70\n64\n';
s += `2\n${this.name}\n`;
s += `62\n${this.colorNumber}\n`;
s += `6\n${this.lineTypeName}\n`;
return s;
}
=======
s += '70\n64\n';
s += `2\n${this.name}\n`;
if (this.trueColor !== -1)
{
s += `420\n${this.trueColor}\n`
}
else
{
s += `62\n${this.colorNumber}\n`;
}
s += `6\n${this.lineTypeName}\n`;
return s;
}
setTrueColor(color)
{
this.trueColor = color;
}
>>>>>>> REPLACE
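A minimal usage sketch of the setTrueColor API introduced in the record above, assuming the Layer constructor shown there; the layer name and packed RGB value are illustrative, not from the commit.

// Usage sketch only — constructor arguments and the RGB value are made up.
const Layer = require('./src/Layer');

const layer = new Layer('walls', 3, 'CONTINUOUS'); // name, ACI color number, line type
layer.setTrueColor(0x2e86c1);                      // 24-bit RGB packed into a single integer

// toDxfString() now writes group code 420 (true color) instead of 62 (color number):
// "0\nLAYER\n70\n64\n2\nwalls\n420\n3049153\n6\nCONTINUOUS\n"
console.log(layer.toDxfString());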
|
pix3ly/budget
|
2ad24b2dc3f38acdef8fa8023fdfcac9749094fc
|
resources/assets/js/app.js
|
javascript
|
mit
|
Update CommonJS importing to ES6
|
window.axios = require('axios')
window.Vue = require('vue')
Vue.component('chrome-picker', require('vue-color').Chrome)
Vue.component('button-dropdown', require('./components/ButtonDropdown.vue'))
Vue.component('datepicker', require('./components/DatePicker.vue')) // TODO DEPRECATE
Vue.component('date-picker', require('./components/DatePicker.vue'))
Vue.component('barchart', require('./components/BarChart.vue'))
Vue.component('dropdown', require('./components/Dropdown.vue'))
Vue.component('transaction-wizard', require('./components/TransactionWizard.vue'))
Vue.component('validation-error', require('./components/ValidationError.vue'))
Vue.component('searchable', require('./components/Searchable.vue'))
Vue.component('color-picker', require('./components/ColorPicker.vue'))
Vue.directive('click-outside', {
bind: function (e, binding, vnode) {
e.clickOutsideEvent = function (event) {
if (!(e == event.target || e.contains(event.target))) {
vnode.context[binding.expression](event)
}
}
document.body.addEventListener('click', e.clickOutsideEvent)
},
unbind: function (e) {
document.body.removeEventListener('click', e.clickOutsideEvent)
}
})
const app = new Vue({
el: '#app'
})
|
import axios from 'axios';
import Vue from 'vue';
import { Chrome } from 'vue-color';
import ButtonDropdown from './components/ButtonDropdown.vue';
import DatePicker from './components/DatePicker.vue';
import BarChart from './components/BarChart.vue';
import Dropdown from './components/Dropdown.vue';
import TransactionWizard from './components/TransactionWizard.vue';
import ValidationError from './components/ValidationError.vue';
import Searchable from './components/Searchable.vue';
import ColorPicker from './components/ColorPicker.vue';
window.axios = axios;
window.Vue = Vue;
Vue.component('chrome-picker', Chrome);
Vue.component('button-dropdown', ButtonDropdown);
Vue.component('datepicker', DatePicker); // TODO DEPRECATE
Vue.component('date-picker', DatePicker);
Vue.component('barchart', BarChart);
Vue.component('dropdown', Dropdown);
Vue.component('transaction-wizard', TransactionWizard);
Vue.component('validation-error', ValidationError);
Vue.component('searchable', Searchable);
Vue.component('color-picker', ColorPicker);
Vue.directive('click-outside', {
bind: function (e, binding, vnode) {
e.clickOutsideEvent = function (event) {
if (!(e == event.target || e.contains(event.target))) {
vnode.context[binding.expression](event)
}
}
document.body.addEventListener('click', e.clickOutsideEvent)
},
unbind: function (e) {
document.body.removeEventListener('click', e.clickOutsideEvent)
}
})
const app = new Vue({
el: '#app'
})
| 25
| 12
| 1
|
mixed
|
--- a/resources/assets/js/app.js
+++ b/resources/assets/js/app.js
@@ -1,16 +1,29 @@
-window.axios = require('axios')
+import axios from 'axios';
+import Vue from 'vue';
-window.Vue = require('vue')
+import { Chrome } from 'vue-color';
-Vue.component('chrome-picker', require('vue-color').Chrome)
+import ButtonDropdown from './components/ButtonDropdown.vue';
+import DatePicker from './components/DatePicker.vue';
+import BarChart from './components/BarChart.vue';
+import Dropdown from './components/Dropdown.vue';
+import TransactionWizard from './components/TransactionWizard.vue';
+import ValidationError from './components/ValidationError.vue';
+import Searchable from './components/Searchable.vue';
+import ColorPicker from './components/ColorPicker.vue';
-Vue.component('button-dropdown', require('./components/ButtonDropdown.vue'))
-Vue.component('datepicker', require('./components/DatePicker.vue')) // TODO DEPRECATE
-Vue.component('date-picker', require('./components/DatePicker.vue'))
-Vue.component('barchart', require('./components/BarChart.vue'))
-Vue.component('dropdown', require('./components/Dropdown.vue'))
-Vue.component('transaction-wizard', require('./components/TransactionWizard.vue'))
-Vue.component('validation-error', require('./components/ValidationError.vue'))
-Vue.component('searchable', require('./components/Searchable.vue'))
-Vue.component('color-picker', require('./components/ColorPicker.vue'))
+window.axios = axios;
+window.Vue = Vue;
+
+Vue.component('chrome-picker', Chrome);
+
+Vue.component('button-dropdown', ButtonDropdown);
+Vue.component('datepicker', DatePicker); // TODO DEPRECATE
+Vue.component('date-picker', DatePicker);
+Vue.component('barchart', BarChart);
+Vue.component('dropdown', Dropdown);
+Vue.component('transaction-wizard', TransactionWizard);
+Vue.component('validation-error', ValidationError);
+Vue.component('searchable', Searchable);
+Vue.component('color-picker', ColorPicker);
|
--- a/resources/assets/js/app.js
+++ b/resources/assets/js/app.js
@@ ... @@
-window.axios = require('axios')
+import axios from 'axios';
+import Vue from 'vue';
-window.Vue = require('vue')
+import { Chrome } from 'vue-color';
-Vue.component('chrome-picker', require('vue-color').Chrome)
+import ButtonDropdown from './components/ButtonDropdown.vue';
+import DatePicker from './components/DatePicker.vue';
+import BarChart from './components/BarChart.vue';
+import Dropdown from './components/Dropdown.vue';
+import TransactionWizard from './components/TransactionWizard.vue';
+import ValidationError from './components/ValidationError.vue';
+import Searchable from './components/Searchable.vue';
+import ColorPicker from './components/ColorPicker.vue';
-Vue.component('button-dropdown', require('./components/ButtonDropdown.vue'))
-Vue.component('datepicker', require('./components/DatePicker.vue')) // TODO DEPRECATE
-Vue.component('date-picker', require('./components/DatePicker.vue'))
-Vue.component('barchart', require('./components/BarChart.vue'))
-Vue.component('dropdown', require('./components/Dropdown.vue'))
-Vue.component('transaction-wizard', require('./components/TransactionWizard.vue'))
-Vue.component('validation-error', require('./components/ValidationError.vue'))
-Vue.component('searchable', require('./components/Searchable.vue'))
-Vue.component('color-picker', require('./components/ColorPicker.vue'))
+window.axios = axios;
+window.Vue = Vue;
+
+Vue.component('chrome-picker', Chrome);
+
+Vue.component('button-dropdown', ButtonDropdown);
+Vue.component('datepicker', DatePicker); // TODO DEPRECATE
+Vue.component('date-picker', DatePicker);
+Vue.component('barchart', BarChart);
+Vue.component('dropdown', Dropdown);
+Vue.component('transaction-wizard', TransactionWizard);
+Vue.component('validation-error', ValidationError);
+Vue.component('searchable', Searchable);
+Vue.component('color-picker', ColorPicker);
|
--- a/resources/assets/js/app.js
+++ b/resources/assets/js/app.js
@@ -1,16 +1,29 @@
DEL window.axios = require('axios')
ADD import axios from 'axios';
ADD import Vue from 'vue';
CON
DEL window.Vue = require('vue')
ADD import { Chrome } from 'vue-color';
CON
DEL Vue.component('chrome-picker', require('vue-color').Chrome)
ADD import ButtonDropdown from './components/ButtonDropdown.vue';
ADD import DatePicker from './components/DatePicker.vue';
ADD import BarChart from './components/BarChart.vue';
ADD import Dropdown from './components/Dropdown.vue';
ADD import TransactionWizard from './components/TransactionWizard.vue';
ADD import ValidationError from './components/ValidationError.vue';
ADD import Searchable from './components/Searchable.vue';
ADD import ColorPicker from './components/ColorPicker.vue';
CON
DEL Vue.component('button-dropdown', require('./components/ButtonDropdown.vue'))
DEL Vue.component('datepicker', require('./components/DatePicker.vue')) // TODO DEPRECATE
DEL Vue.component('date-picker', require('./components/DatePicker.vue'))
DEL Vue.component('barchart', require('./components/BarChart.vue'))
DEL Vue.component('dropdown', require('./components/Dropdown.vue'))
DEL Vue.component('transaction-wizard', require('./components/TransactionWizard.vue'))
DEL Vue.component('validation-error', require('./components/ValidationError.vue'))
DEL Vue.component('searchable', require('./components/Searchable.vue'))
DEL Vue.component('color-picker', require('./components/ColorPicker.vue'))
ADD window.axios = axios;
ADD window.Vue = Vue;
ADD
ADD Vue.component('chrome-picker', Chrome);
ADD
ADD Vue.component('button-dropdown', ButtonDropdown);
ADD Vue.component('datepicker', DatePicker); // TODO DEPRECATE
ADD Vue.component('date-picker', DatePicker);
ADD Vue.component('barchart', BarChart);
ADD Vue.component('dropdown', Dropdown);
ADD Vue.component('transaction-wizard', TransactionWizard);
ADD Vue.component('validation-error', ValidationError);
ADD Vue.component('searchable', Searchable);
ADD Vue.component('color-picker', ColorPicker);
CON
|
<<<<<<< SEARCH
window.axios = require('axios')
window.Vue = require('vue')
Vue.component('chrome-picker', require('vue-color').Chrome)
Vue.component('button-dropdown', require('./components/ButtonDropdown.vue'))
Vue.component('datepicker', require('./components/DatePicker.vue')) // TODO DEPRECATE
Vue.component('date-picker', require('./components/DatePicker.vue'))
Vue.component('barchart', require('./components/BarChart.vue'))
Vue.component('dropdown', require('./components/Dropdown.vue'))
Vue.component('transaction-wizard', require('./components/TransactionWizard.vue'))
Vue.component('validation-error', require('./components/ValidationError.vue'))
Vue.component('searchable', require('./components/Searchable.vue'))
Vue.component('color-picker', require('./components/ColorPicker.vue'))
Vue.directive('click-outside', {
=======
import axios from 'axios';
import Vue from 'vue';
import { Chrome } from 'vue-color';
import ButtonDropdown from './components/ButtonDropdown.vue';
import DatePicker from './components/DatePicker.vue';
import BarChart from './components/BarChart.vue';
import Dropdown from './components/Dropdown.vue';
import TransactionWizard from './components/TransactionWizard.vue';
import ValidationError from './components/ValidationError.vue';
import Searchable from './components/Searchable.vue';
import ColorPicker from './components/ColorPicker.vue';
window.axios = axios;
window.Vue = Vue;
Vue.component('chrome-picker', Chrome);
Vue.component('button-dropdown', ButtonDropdown);
Vue.component('datepicker', DatePicker); // TODO DEPRECATE
Vue.component('date-picker', DatePicker);
Vue.component('barchart', BarChart);
Vue.component('dropdown', Dropdown);
Vue.component('transaction-wizard', TransactionWizard);
Vue.component('validation-error', ValidationError);
Vue.component('searchable', Searchable);
Vue.component('color-picker', ColorPicker);
Vue.directive('click-outside', {
>>>>>>> REPLACE
|
robinverduijn/gradle
|
07c71ee0574810bef961cf1174f4bfeb493e659a
|
subprojects/core-api/core-api.gradle.kts
|
kotlin
|
apache-2.0
|
Use Java 8 in Core API
|
import org.gradle.gradlebuild.testing.integrationtests.cleanup.WhenNotEmpty
import org.gradle.gradlebuild.unittestandcompile.ModuleType
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
plugins {
`java-library`
// gradlebuild.`strict-compile`
}
dependencies {
api(project(":baseServices"))
api(project(":baseServicesGroovy"))
api(project(":logging"))
api(project(":persistentCache"))
api(project(":processServices"))
api(project(":resources"))
implementation(library("ant"))
implementation(library("commons_io"))
implementation(library("commons_lang"))
implementation(library("inject"))
testFixturesImplementation(project(":internalTesting"))
}
gradlebuildJava {
moduleType = ModuleType.WORKER
}
testFixtures {
from(":logging")
}
testFilesCleanup {
policy.set(WhenNotEmpty.REPORT)
}
|
import org.gradle.gradlebuild.testing.integrationtests.cleanup.WhenNotEmpty
import org.gradle.gradlebuild.unittestandcompile.ModuleType
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
plugins {
`java-library`
// gradlebuild.`strict-compile`
}
dependencies {
api(project(":baseServices"))
api(project(":baseServicesGroovy"))
api(project(":logging"))
api(project(":persistentCache"))
api(project(":processServices"))
api(project(":resources"))
implementation(library("ant"))
implementation(library("commons_io"))
implementation(library("commons_lang"))
implementation(library("inject"))
testFixturesImplementation(project(":internalTesting"))
}
gradlebuildJava {
moduleType = ModuleType.CORE
}
testFixtures {
from(":logging")
}
testFilesCleanup {
policy.set(WhenNotEmpty.REPORT)
}
| 1
| 1
| 1
|
mixed
|
--- a/subprojects/core-api/core-api.gradle.kts
+++ b/subprojects/core-api/core-api.gradle.kts
@@ -40,3 +40,3 @@
gradlebuildJava {
- moduleType = ModuleType.WORKER
+ moduleType = ModuleType.CORE
}
|
--- a/subprojects/core-api/core-api.gradle.kts
+++ b/subprojects/core-api/core-api.gradle.kts
@@ ... @@
gradlebuildJava {
- moduleType = ModuleType.WORKER
+ moduleType = ModuleType.CORE
}
|
--- a/subprojects/core-api/core-api.gradle.kts
+++ b/subprojects/core-api/core-api.gradle.kts
@@ -40,3 +40,3 @@
CON gradlebuildJava {
DEL moduleType = ModuleType.WORKER
ADD moduleType = ModuleType.CORE
CON }
|
<<<<<<< SEARCH
gradlebuildJava {
moduleType = ModuleType.WORKER
}
=======
gradlebuildJava {
moduleType = ModuleType.CORE
}
>>>>>>> REPLACE
|
PanDAWMS/panda-bigmon-atlas
|
ba32a22cc0cb41c4548c658a7195fab56dab6dbf
|
atlas/prodtask/tasks.py
|
python
|
apache-2.0
|
Add remove done staged rules
|
from __future__ import absolute_import, unicode_literals
from atlas.celerybackend.celery import app
from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed
from atlas.prodtask.hashtag import hashtag_request_to_tasks
from atlas.prodtask.mcevgen import sync_cvmfs_db
from atlas.prodtask.open_ended import check_open_ended
from atlas.prodtask.task_views import sync_old_tasks
import logging
_logger = logging.getLogger('prodtaskwebui')
@app.task
def test_celery():
_logger.info('test celery')
return 2
@app.task(ignore_result=True)
def sync_tasks():
sync_old_tasks(-1)
return None
@app.task(ignore_result=True)
def step_actions():
find_action_to_execute()
return None
@app.task(ignore_result=True)
def data_carousel():
submit_all_tapes_processed()
return None
@app.task(ignore_result=True)
def open_ended():
check_open_ended()
return None
@app.task(ignore_result=True)
def request_hashtags():
hashtag_request_to_tasks()
return None
@app.task(ignore_result=True)
def sync_evgen_jo():
sync_cvmfs_db()
return None
|
from __future__ import absolute_import, unicode_literals
from atlas.celerybackend.celery import app
from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed, delete_done_staging_rules
from atlas.prodtask.hashtag import hashtag_request_to_tasks
from atlas.prodtask.mcevgen import sync_cvmfs_db
from atlas.prodtask.open_ended import check_open_ended
from atlas.prodtask.task_views import sync_old_tasks
import logging
_logger = logging.getLogger('prodtaskwebui')
@app.task
def test_celery():
_logger.info('test celery')
return 2
@app.task(ignore_result=True)
def sync_tasks():
sync_old_tasks(-1)
return None
@app.task(ignore_result=True)
def step_actions():
find_action_to_execute()
return None
@app.task(ignore_result=True)
def data_carousel():
submit_all_tapes_processed()
return None
@app.task(ignore_result=True)
def open_ended():
check_open_ended()
return None
@app.task(ignore_result=True)
def request_hashtags():
hashtag_request_to_tasks()
return None
@app.task(ignore_result=True)
def sync_evgen_jo():
sync_cvmfs_db()
return None
@app.task(ignore_result=True)
def remove_done_staging(production_requests):
delete_done_staging_rules(production_requests)
return None
| 6
| 1
| 2
|
mixed
|
--- a/atlas/prodtask/tasks.py
+++ b/atlas/prodtask/tasks.py
@@ -3,3 +3,3 @@
from atlas.celerybackend.celery import app
-from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed
+from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed, delete_done_staging_rules
from atlas.prodtask.hashtag import hashtag_request_to_tasks
@@ -51 +51,6 @@
return None
+
+@app.task(ignore_result=True)
+def remove_done_staging(production_requests):
+ delete_done_staging_rules(production_requests)
+ return None
|
--- a/atlas/prodtask/tasks.py
+++ b/atlas/prodtask/tasks.py
@@ ... @@
from atlas.celerybackend.celery import app
-from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed
+from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed, delete_done_staging_rules
from atlas.prodtask.hashtag import hashtag_request_to_tasks
@@ ... @@
return None
+
+@app.task(ignore_result=True)
+def remove_done_staging(production_requests):
+ delete_done_staging_rules(production_requests)
+ return None
|
--- a/atlas/prodtask/tasks.py
+++ b/atlas/prodtask/tasks.py
@@ -3,3 +3,3 @@
CON from atlas.celerybackend.celery import app
DEL from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed
ADD from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed, delete_done_staging_rules
CON from atlas.prodtask.hashtag import hashtag_request_to_tasks
@@ -51 +51,6 @@
CON return None
ADD
ADD @app.task(ignore_result=True)
ADD def remove_done_staging(production_requests):
ADD delete_done_staging_rules(production_requests)
ADD return None
|
<<<<<<< SEARCH
from atlas.celerybackend.celery import app
from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed
from atlas.prodtask.hashtag import hashtag_request_to_tasks
from atlas.prodtask.mcevgen import sync_cvmfs_db
=======
from atlas.celerybackend.celery import app
from atlas.prestage.views import find_action_to_execute, submit_all_tapes_processed, delete_done_staging_rules
from atlas.prodtask.hashtag import hashtag_request_to_tasks
from atlas.prodtask.mcevgen import sync_cvmfs_db
>>>>>>> REPLACE
<<<<<<< SEARCH
sync_cvmfs_db()
return None
=======
sync_cvmfs_db()
return None
@app.task(ignore_result=True)
def remove_done_staging(production_requests):
delete_done_staging_rules(production_requests)
return None
>>>>>>> REPLACE
|
teamleadercrm/teamleader-ui
|
b78fed8d257a5fd2c841c37eb835ace986174c96
|
src/components/wysiwygEditor/decorators/linkDecorator.js
|
javascript
|
mit
|
:bug: Fix certain links not opening correctly
|
import React, { useState } from 'react';
import { IconExternalLinkSmallOutline } from '@teamleader/ui-icons';
import Box from '../../box';
import Link from '../../link';
import theme from './theme.css';
const findLinkEntities = (contentBlock, callback, contentState) => {
contentBlock.findEntityRanges((character) => {
const entityKey = character.getEntity();
return entityKey !== null && contentState.getEntity(entityKey).getType() === 'LINK';
}, callback);
};
const LinkEntity = ({ entityKey, contentState, children }) => {
const [showOpenLinkIcon, setShowOpenLinkIcon] = useState();
const { url } = contentState.getEntity(entityKey).getData();
const openLink = () => {
window.open(url, 'blank');
};
const toggleShowOpenLinkIcon = () => {
setShowOpenLinkIcon(!showOpenLinkIcon);
};
return (
<Box display="inline-block" onMouseEnter={toggleShowOpenLinkIcon} onMouseLeave={toggleShowOpenLinkIcon}>
<Link className={theme['link']} href="" inherit={false} onClick={(event) => event.preventDefault()}>
{children}
</Link>
{showOpenLinkIcon && <IconExternalLinkSmallOutline onClick={openLink} className={theme['icon']} />}
</Box>
);
};
export default {
strategy: findLinkEntities,
component: LinkEntity,
};
|
import React, { useState } from 'react';
import { IconExternalLinkSmallOutline } from '@teamleader/ui-icons';
import Box from '../../box';
import Link from '../../link';
import theme from './theme.css';
const findLinkEntities = (contentBlock, callback, contentState) => {
contentBlock.findEntityRanges((character) => {
const entityKey = character.getEntity();
return entityKey !== null && contentState.getEntity(entityKey).getType() === 'LINK';
}, callback);
};
const LinkEntity = ({ entityKey, contentState, children }) => {
const [showOpenLinkIcon, setShowOpenLinkIcon] = useState();
const { url } = contentState.getEntity(entityKey).getData();
const openLink = () => {
let prefixedUrl = url;
if (!url.includes('//')) {
prefixedUrl = '//' + url;
}
window.open(prefixedUrl, '_blank');
};
const toggleShowOpenLinkIcon = () => {
setShowOpenLinkIcon(!showOpenLinkIcon);
};
return (
<Box display="inline-block" onMouseEnter={toggleShowOpenLinkIcon} onMouseLeave={toggleShowOpenLinkIcon}>
<Link className={theme['link']} href="" inherit={false} onClick={(event) => event.preventDefault()}>
{children}
</Link>
{showOpenLinkIcon && <IconExternalLinkSmallOutline onClick={openLink} className={theme['icon']} />}
</Box>
);
};
export default {
strategy: findLinkEntities,
component: LinkEntity,
};
| 5
| 1
| 1
|
mixed
|
--- a/src/components/wysiwygEditor/decorators/linkDecorator.js
+++ b/src/components/wysiwygEditor/decorators/linkDecorator.js
@@ -20,3 +20,7 @@
const openLink = () => {
- window.open(url, 'blank');
+ let prefixedUrl = url;
+ if (!url.includes('//')) {
+ prefixedUrl = '//' + url;
+ }
+ window.open(prefixedUrl, '_blank');
};
|
--- a/src/components/wysiwygEditor/decorators/linkDecorator.js
+++ b/src/components/wysiwygEditor/decorators/linkDecorator.js
@@ ... @@
const openLink = () => {
- window.open(url, 'blank');
+ let prefixedUrl = url;
+ if (!url.includes('//')) {
+ prefixedUrl = '//' + url;
+ }
+ window.open(prefixedUrl, '_blank');
};
|
--- a/src/components/wysiwygEditor/decorators/linkDecorator.js
+++ b/src/components/wysiwygEditor/decorators/linkDecorator.js
@@ -20,3 +20,7 @@
CON const openLink = () => {
DEL window.open(url, 'blank');
ADD let prefixedUrl = url;
ADD if (!url.includes('//')) {
ADD prefixedUrl = '//' + url;
ADD }
ADD window.open(prefixedUrl, '_blank');
CON };
|
<<<<<<< SEARCH
const openLink = () => {
window.open(url, 'blank');
};
=======
const openLink = () => {
let prefixedUrl = url;
if (!url.includes('//')) {
prefixedUrl = '//' + url;
}
window.open(prefixedUrl, '_blank');
};
>>>>>>> REPLACE
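The fix in the record above comes down to making the URL protocol-relative before window.open and using the standard '_blank' target. A standalone restatement of that logic, with a made-up helper name:

// Illustrative helper — not part of the component; it mirrors the prefixing logic above.
function toOpenableUrl(url) {
  // Without '//', window.open would resolve 'example.com' as a path on the current site.
  return url.includes('//') ? url : '//' + url;
}

console.log(toOpenableUrl('example.com'));         // '//example.com' (protocol-relative)
console.log(toOpenableUrl('https://example.com')); // unchanged
// window.open(toOpenableUrl(url), '_blank') then opens the link in a new tab.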
|
jbranchaud/simple-sudoku-check
|
1c1b9577b538bfb7bc3cc7e2e392aa7a8f43ce86
|
Gruntfile.js
|
javascript
|
mit
|
Add a task to the Grunt file for just running the tests.
|
'use strict';
module.exports = function (grunt) {
// Show elapsed time at the end
require('time-grunt')(grunt);
// Load all grunt tasks
require('load-grunt-tasks')(grunt);
// Project configuration.
grunt.initConfig({
nodeunit: {
files: ['test/**/*_test.js']
},
jshint: {
options: {
jshintrc: '.jshintrc',
reporter: require('jshint-stylish')
},
gruntfile: {
src: 'Gruntfile.js'
},
lib: {
src: ['lib/**/*.js']
},
test: {
src: ['test/**/*.js']
}
},
watch: {
gruntfile: {
files: '<%= jshint.gruntfile.src %>',
tasks: ['jshint:gruntfile']
},
lib: {
files: '<%= jshint.lib.src %>',
tasks: ['jshint:lib', 'nodeunit']
},
test: {
files: '<%= jshint.test.src %>',
tasks: ['jshint:test', 'nodeunit']
}
}
});
// Default task.
grunt.registerTask('default', ['jshint', 'nodeunit']);
};
|
'use strict';
module.exports = function (grunt) {
// Show elapsed time at the end
require('time-grunt')(grunt);
// Load all grunt tasks
require('load-grunt-tasks')(grunt);
// Project configuration.
grunt.initConfig({
nodeunit: {
files: ['test/**/*_test.js']
},
jshint: {
options: {
jshintrc: '.jshintrc',
reporter: require('jshint-stylish')
},
gruntfile: {
src: 'Gruntfile.js'
},
lib: {
src: ['lib/**/*.js']
},
test: {
src: ['test/**/*.js']
}
},
watch: {
gruntfile: {
files: '<%= jshint.gruntfile.src %>',
tasks: ['jshint:gruntfile']
},
lib: {
files: '<%= jshint.lib.src %>',
tasks: ['jshint:lib', 'nodeunit']
},
test: {
files: '<%= jshint.test.src %>',
tasks: ['jshint:test', 'nodeunit']
}
}
});
// Default task.
grunt.registerTask('default', ['jshint', 'nodeunit']);
// Nodeunit Test task.
grunt.registerTask('test', ['nodeunit']);
};
| 3
| 0
| 1
|
add_only
|
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -47,2 +47,5 @@
+ // Nodeunit Test task.
+ grunt.registerTask('test', ['nodeunit']);
+
};
|
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ ... @@
+ // Nodeunit Test task.
+ grunt.registerTask('test', ['nodeunit']);
+
};
|
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -47,2 +47,5 @@
CON
ADD // Nodeunit Test task.
ADD grunt.registerTask('test', ['nodeunit']);
ADD
CON };
|
<<<<<<< SEARCH
grunt.registerTask('default', ['jshint', 'nodeunit']);
};
=======
grunt.registerTask('default', ['jshint', 'nodeunit']);
// Nodeunit Test task.
grunt.registerTask('test', ['nodeunit']);
};
>>>>>>> REPLACE
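The task added in the record above is a plain Grunt alias, so grunt test runs only the nodeunit suite while the default task still lints first. A minimal self-contained sketch of the same pattern (the record itself relies on load-grunt-tasks rather than an explicit loadNpmTasks call):

// Sketch of a Gruntfile exposing a tests-only alias; plugin and file layout are assumed.
module.exports = function (grunt) {
  grunt.loadNpmTasks('grunt-contrib-nodeunit');
  grunt.initConfig({ nodeunit: { files: ['test/**/*_test.js'] } });
  grunt.registerTask('test', ['nodeunit']); // `grunt test` runs only the unit tests
};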
|
pyamsoft/pydroid
|
3e3d4d5f59244e15ee6ee1df8a158a3fffa407d5
|
core/src/main/java/com/pyamsoft/pydroid/core/Enforcer.kt
|
kotlin
|
apache-2.0
|
Check for off and on main thread
|
/*
* Copyright 2019 Peter Kenji Yamanaka
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.pyamsoft.pydroid.core
import android.os.Looper
import androidx.annotation.CheckResult
object Enforcer {
private val mainLooper by lazy { Looper.getMainLooper() }
@CheckResult
fun isMainThread(): Boolean {
return mainLooper.thread == Thread.currentThread()
}
fun assertNotOnMainThread() {
if (isMainThread()) {
throw AssertionError("This operation must be off the Main/UI thread!")
}
}
}
|
/*
* Copyright 2019 Peter Kenji Yamanaka
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.pyamsoft.pydroid.core
import android.os.Looper
import androidx.annotation.CheckResult
object Enforcer {
private val mainLooper by lazy { Looper.getMainLooper() }
@CheckResult
fun isMainThread(): Boolean {
return mainLooper.thread == Thread.currentThread()
}
@Deprecated(
"Use assertOffMainThread()", replaceWith = ReplaceWith(
"Enforcer.assertOffMainThread()",
imports = ["com.pyamsoft.pydroid.core.Enforcer"]
)
)
fun assertNotOnMainThread() {
assertOffMainThread()
}
fun assertOffMainThread() {
if (isMainThread()) {
throw AssertionError("This operation must be OFF the Main/UI thread!")
}
}
fun assertOnMainThread() {
if (!isMainThread()) {
throw AssertionError("This operation must be ON the Main/UI thread!")
}
}
}
| 17
| 1
| 1
|
mixed
|
--- a/core/src/main/java/com/pyamsoft/pydroid/core/Enforcer.kt
+++ b/core/src/main/java/com/pyamsoft/pydroid/core/Enforcer.kt
@@ -31,5 +31,21 @@
+ @Deprecated(
+ "Use assertOffMainThread()", replaceWith = ReplaceWith(
+ "Enforcer.assertOffMainThread()",
+ imports = ["com.pyamsoft.pydroid.core.Enforcer"]
+ )
+ )
fun assertNotOnMainThread() {
+ assertOffMainThread()
+ }
+
+ fun assertOffMainThread() {
if (isMainThread()) {
- throw AssertionError("This operation must be off the Main/UI thread!")
+ throw AssertionError("This operation must be OFF the Main/UI thread!")
+ }
+ }
+
+ fun assertOnMainThread() {
+ if (!isMainThread()) {
+ throw AssertionError("This operation must be ON the Main/UI thread!")
}
|
--- a/core/src/main/java/com/pyamsoft/pydroid/core/Enforcer.kt
+++ b/core/src/main/java/com/pyamsoft/pydroid/core/Enforcer.kt
@@ ... @@
+ @Deprecated(
+ "Use assertOffMainThread()", replaceWith = ReplaceWith(
+ "Enforcer.assertOffMainThread()",
+ imports = ["com.pyamsoft.pydroid.core.Enforcer"]
+ )
+ )
fun assertNotOnMainThread() {
+ assertOffMainThread()
+ }
+
+ fun assertOffMainThread() {
if (isMainThread()) {
- throw AssertionError("This operation must be off the Main/UI thread!")
+ throw AssertionError("This operation must be OFF the Main/UI thread!")
+ }
+ }
+
+ fun assertOnMainThread() {
+ if (!isMainThread()) {
+ throw AssertionError("This operation must be ON the Main/UI thread!")
}
|
--- a/core/src/main/java/com/pyamsoft/pydroid/core/Enforcer.kt
+++ b/core/src/main/java/com/pyamsoft/pydroid/core/Enforcer.kt
@@ -31,5 +31,21 @@
CON
ADD @Deprecated(
ADD "Use assertOffMainThread()", replaceWith = ReplaceWith(
ADD "Enforcer.assertOffMainThread()",
ADD imports = ["com.pyamsoft.pydroid.core.Enforcer"]
ADD )
ADD )
CON fun assertNotOnMainThread() {
ADD assertOffMainThread()
ADD }
ADD
ADD fun assertOffMainThread() {
CON if (isMainThread()) {
DEL throw AssertionError("This operation must be off the Main/UI thread!")
ADD throw AssertionError("This operation must be OFF the Main/UI thread!")
ADD }
ADD }
ADD
ADD fun assertOnMainThread() {
ADD if (!isMainThread()) {
ADD throw AssertionError("This operation must be ON the Main/UI thread!")
CON }
|
<<<<<<< SEARCH
}
fun assertNotOnMainThread() {
if (isMainThread()) {
throw AssertionError("This operation must be off the Main/UI thread!")
}
}
=======
}
@Deprecated(
"Use assertOffMainThread()", replaceWith = ReplaceWith(
"Enforcer.assertOffMainThread()",
imports = ["com.pyamsoft.pydroid.core.Enforcer"]
)
)
fun assertNotOnMainThread() {
assertOffMainThread()
}
fun assertOffMainThread() {
if (isMainThread()) {
throw AssertionError("This operation must be OFF the Main/UI thread!")
}
}
fun assertOnMainThread() {
if (!isMainThread()) {
throw AssertionError("This operation must be ON the Main/UI thread!")
}
}
>>>>>>> REPLACE
|
salopensource/sal
|
f9c51c592483ab08417d4df33898d32f7700ffe9
|
sal/management/commands/update_admin_user.py
|
python
|
apache-2.0
|
Fix exception handling in management command. Clean up.
|
'''
Creates an admin user if there aren't any existing superusers
'''
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from optparse import make_option
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise StandardError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print('{0} updated'.format(username))
else:
print('There are already {0} superusers'.format(su_count))
|
"""Creates an admin user if there aren't any existing superusers."""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise CommandError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print(f'{username} updated')
else:
print(f'There are already {su_count} superusers')
| 8
| 8
| 3
|
mixed
|
--- a/sal/management/commands/update_admin_user.py
+++ b/sal/management/commands/update_admin_user.py
@@ -1,8 +1,8 @@
-'''
-Creates an admin user if there aren't any existing superusers
-'''
+"""Creates an admin user if there aren't any existing superusers."""
+
+from optparse import make_option
+
+from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
-from django.contrib.auth.models import User
-from optparse import make_option
@@ -29,3 +29,3 @@
if not username or not password:
- raise StandardError('You must specify a username and password')
+ raise CommandError('You must specify a username and password')
# Get the current superusers
@@ -39,4 +39,4 @@
user.save()
- print('{0} updated'.format(username))
+ print(f'{username} updated')
else:
- print('There are already {0} superusers'.format(su_count))
+ print(f'There are already {su_count} superusers')
|
--- a/sal/management/commands/update_admin_user.py
+++ b/sal/management/commands/update_admin_user.py
@@ ... @@
-'''
-Creates an admin user if there aren't any existing superusers
-'''
+"""Creates an admin user if there aren't any existing superusers."""
+
+from optparse import make_option
+
+from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
-from django.contrib.auth.models import User
-from optparse import make_option
@@ ... @@
if not username or not password:
- raise StandardError('You must specify a username and password')
+ raise CommandError('You must specify a username and password')
# Get the current superusers
@@ ... @@
user.save()
- print('{0} updated'.format(username))
+ print(f'{username} updated')
else:
- print('There are already {0} superusers'.format(su_count))
+ print(f'There are already {su_count} superusers')
|
--- a/sal/management/commands/update_admin_user.py
+++ b/sal/management/commands/update_admin_user.py
@@ -1,8 +1,8 @@
DEL '''
DEL Creates an admin user if there aren't any existing superusers
DEL '''
ADD """Creates an admin user if there aren't any existing superusers."""
CON
ADD
ADD from optparse import make_option
ADD
ADD from django.contrib.auth.models import User
CON from django.core.management.base import BaseCommand, CommandError
DEL from django.contrib.auth.models import User
DEL from optparse import make_option
CON
@@ -29,3 +29,3 @@
CON if not username or not password:
DEL raise StandardError('You must specify a username and password')
ADD raise CommandError('You must specify a username and password')
CON # Get the current superusers
@@ -39,4 +39,4 @@
CON user.save()
DEL print('{0} updated'.format(username))
ADD print(f'{username} updated')
CON else:
DEL print('There are already {0} superusers'.format(su_count))
ADD print(f'There are already {su_count} superusers')
|
<<<<<<< SEARCH
'''
Creates an admin user if there aren't any existing superusers
'''
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from optparse import make_option
=======
"""Creates an admin user if there aren't any existing superusers."""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
>>>>>>> REPLACE
<<<<<<< SEARCH
password = options.get('password')
if not username or not password:
raise StandardError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
=======
password = options.get('password')
if not username or not password:
raise CommandError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
>>>>>>> REPLACE
<<<<<<< SEARCH
user.is_superuser = True
user.save()
print('{0} updated'.format(username))
else:
print('There are already {0} superusers'.format(su_count))
=======
user.is_superuser = True
user.save()
print(f'{username} updated')
else:
print(f'There are already {su_count} superusers')
>>>>>>> REPLACE
|
bmuschko/gradle-docker-plugin
|
b5fd8908a7b56592952d60386db8a4650d5c40fa
|
buildSrc/src/main/kotlin/com/bmuschko/gradle/docker/UserGuidePlugin.kt
|
kotlin
|
apache-2.0
|
Use the same version of AsciidoctorJ to avoid classpath issues
|
package com.bmuschko.gradle.docker
import org.asciidoctor.gradle.AsciidoctorPlugin
import org.asciidoctor.gradle.AsciidoctorTask
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.tasks.util.PatternSet
import org.gradle.kotlin.dsl.apply
import org.gradle.kotlin.dsl.delegateClosureOf
import org.gradle.kotlin.dsl.named
class UserGuidePlugin : Plugin<Project> {
override fun apply(project: Project): Unit = project.run {
applyAsciidocPlugin()
configureAsciidoctorTask()
}
private
fun Project.applyAsciidocPlugin() {
apply<AsciidoctorPlugin>()
}
private
fun Project.configureAsciidoctorTask() {
tasks.named<AsciidoctorTask>("asciidoctor") {
sourceDir = file("src/docs/asciidoc")
sources(delegateClosureOf<PatternSet> {
include("index.adoc")
})
attributes(
mapOf(
"toc" to "left",
"source-highlighter" to "prettify",
"icons" to "font",
"numbered" to "",
"idprefix" to "",
"docinfo1" to "true",
"sectanchors" to "true",
"samplesCodeDir" to file("src/docs/samples/code")
)
)
}
}
}
|
package com.bmuschko.gradle.docker
import org.asciidoctor.gradle.AsciidoctorExtension
import org.asciidoctor.gradle.AsciidoctorPlugin
import org.asciidoctor.gradle.AsciidoctorTask
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.tasks.util.PatternSet
import org.gradle.kotlin.dsl.apply
import org.gradle.kotlin.dsl.configure
import org.gradle.kotlin.dsl.delegateClosureOf
import org.gradle.kotlin.dsl.named
class UserGuidePlugin : Plugin<Project> {
override fun apply(project: Project): Unit = project.run {
applyAsciidocPlugin()
configureAsciidoctorExtension()
configureAsciidoctorTask()
}
private
fun Project.applyAsciidocPlugin() {
apply<AsciidoctorPlugin>()
}
private
fun Project.configureAsciidoctorExtension() {
configure<AsciidoctorExtension> {
setVersion("1.6.0-alpha.7")
}
}
private
fun Project.configureAsciidoctorTask() {
tasks.named<AsciidoctorTask>("asciidoctor") {
sourceDir = file("src/docs/asciidoc")
sources(delegateClosureOf<PatternSet> {
include("index.adoc")
})
attributes(
mapOf(
"toc" to "left",
"source-highlighter" to "prettify",
"icons" to "font",
"numbered" to "",
"idprefix" to "",
"docinfo1" to "true",
"sectanchors" to "true",
"samplesCodeDir" to file("src/docs/samples/code")
)
)
}
}
}
| 10
| 0
| 4
|
add_only
|
--- a/buildSrc/src/main/kotlin/com/bmuschko/gradle/docker/UserGuidePlugin.kt
+++ b/buildSrc/src/main/kotlin/com/bmuschko/gradle/docker/UserGuidePlugin.kt
@@ -2,2 +2,3 @@
+import org.asciidoctor.gradle.AsciidoctorExtension
import org.asciidoctor.gradle.AsciidoctorPlugin
@@ -8,2 +9,3 @@
import org.gradle.kotlin.dsl.apply
+import org.gradle.kotlin.dsl.configure
import org.gradle.kotlin.dsl.delegateClosureOf
@@ -14,2 +16,3 @@
applyAsciidocPlugin()
+ configureAsciidoctorExtension()
configureAsciidoctorTask()
@@ -20,2 +23,9 @@
apply<AsciidoctorPlugin>()
+ }
+
+ private
+ fun Project.configureAsciidoctorExtension() {
+ configure<AsciidoctorExtension> {
+ setVersion("1.6.0-alpha.7")
+ }
}
|
--- a/buildSrc/src/main/kotlin/com/bmuschko/gradle/docker/UserGuidePlugin.kt
+++ b/buildSrc/src/main/kotlin/com/bmuschko/gradle/docker/UserGuidePlugin.kt
@@ ... @@
+import org.asciidoctor.gradle.AsciidoctorExtension
import org.asciidoctor.gradle.AsciidoctorPlugin
@@ ... @@
import org.gradle.kotlin.dsl.apply
+import org.gradle.kotlin.dsl.configure
import org.gradle.kotlin.dsl.delegateClosureOf
@@ ... @@
applyAsciidocPlugin()
+ configureAsciidoctorExtension()
configureAsciidoctorTask()
@@ ... @@
apply<AsciidoctorPlugin>()
+ }
+
+ private
+ fun Project.configureAsciidoctorExtension() {
+ configure<AsciidoctorExtension> {
+ setVersion("1.6.0-alpha.7")
+ }
}
|
--- a/buildSrc/src/main/kotlin/com/bmuschko/gradle/docker/UserGuidePlugin.kt
+++ b/buildSrc/src/main/kotlin/com/bmuschko/gradle/docker/UserGuidePlugin.kt
@@ -2,2 +2,3 @@
CON
ADD import org.asciidoctor.gradle.AsciidoctorExtension
CON import org.asciidoctor.gradle.AsciidoctorPlugin
@@ -8,2 +9,3 @@
CON import org.gradle.kotlin.dsl.apply
ADD import org.gradle.kotlin.dsl.configure
CON import org.gradle.kotlin.dsl.delegateClosureOf
@@ -14,2 +16,3 @@
CON applyAsciidocPlugin()
ADD configureAsciidoctorExtension()
CON configureAsciidoctorTask()
@@ -20,2 +23,9 @@
CON apply<AsciidoctorPlugin>()
ADD }
ADD
ADD private
ADD fun Project.configureAsciidoctorExtension() {
ADD configure<AsciidoctorExtension> {
ADD setVersion("1.6.0-alpha.7")
ADD }
CON }
|
<<<<<<< SEARCH
package com.bmuschko.gradle.docker
import org.asciidoctor.gradle.AsciidoctorPlugin
import org.asciidoctor.gradle.AsciidoctorTask
=======
package com.bmuschko.gradle.docker
import org.asciidoctor.gradle.AsciidoctorExtension
import org.asciidoctor.gradle.AsciidoctorPlugin
import org.asciidoctor.gradle.AsciidoctorTask
>>>>>>> REPLACE
<<<<<<< SEARCH
import org.gradle.api.tasks.util.PatternSet
import org.gradle.kotlin.dsl.apply
import org.gradle.kotlin.dsl.delegateClosureOf
import org.gradle.kotlin.dsl.named
=======
import org.gradle.api.tasks.util.PatternSet
import org.gradle.kotlin.dsl.apply
import org.gradle.kotlin.dsl.configure
import org.gradle.kotlin.dsl.delegateClosureOf
import org.gradle.kotlin.dsl.named
>>>>>>> REPLACE
<<<<<<< SEARCH
override fun apply(project: Project): Unit = project.run {
applyAsciidocPlugin()
configureAsciidoctorTask()
}
=======
override fun apply(project: Project): Unit = project.run {
applyAsciidocPlugin()
configureAsciidoctorExtension()
configureAsciidoctorTask()
}
>>>>>>> REPLACE
<<<<<<< SEARCH
fun Project.applyAsciidocPlugin() {
apply<AsciidoctorPlugin>()
}
=======
fun Project.applyAsciidocPlugin() {
apply<AsciidoctorPlugin>()
}
private
fun Project.configureAsciidoctorExtension() {
configure<AsciidoctorExtension> {
setVersion("1.6.0-alpha.7")
}
}
>>>>>>> REPLACE
|
davidsusu/tree-printer
|
8b911a2da6ebca1129e63ec8d1a4ca1e20a42579
|
src/main/java/hu/webarticum/treeprinter/decorator/TrackingTreeNodeDecorator.java
|
java
|
apache-2.0
|
Rename a variable in TrackingTreeNodeDecorator
|
package hu.webarticum.treeprinter.decorator;
import hu.webarticum.treeprinter.TreeNode;
public class TrackingTreeNodeDecorator extends AbstractTreeNodeDecorator {
public final TrackingTreeNodeDecorator parent;
public final int index;
public TrackingTreeNodeDecorator(TreeNode baseNode) {
this(baseNode, null, 0);
}
public TrackingTreeNodeDecorator(TreeNode baseNode, TrackingTreeNodeDecorator parent, int index) {
super(baseNode);
this.parent = parent;
this.index = index;
}
@Override
public String content() {
return decoratedNode.content();
}
@Override
protected TreeNode decorateChild(TreeNode childNode, int index) {
return new TrackingTreeNodeDecorator(childNode, this, index);
}
@Override
public boolean isDecorable() {
return false;
}
@Override
public int hashCode() {
int parentHashCode = parent != null ? parent.hashCode(): 0;
return (parentHashCode * 37) + index;
}
@Override
public boolean equals(Object other) {
if (!(other instanceof TrackingTreeNodeDecorator)) {
return false;
}
TrackingTreeNodeDecorator otherReferenceTreeNode = (TrackingTreeNodeDecorator) other;
TrackingTreeNodeDecorator otherParent = otherReferenceTreeNode.parent;
if (this == otherReferenceTreeNode) {
return true;
} else if (parent == null) {
if (otherParent != null) {
return false;
}
} else if (otherParent == null || !parent.equals(otherParent)) {
return false;
}
return index == otherReferenceTreeNode.index;
}
}
|
package hu.webarticum.treeprinter.decorator;
import hu.webarticum.treeprinter.TreeNode;
public class TrackingTreeNodeDecorator extends AbstractTreeNodeDecorator {
public final TrackingTreeNodeDecorator parent;
public final int index;
public TrackingTreeNodeDecorator(TreeNode baseNode) {
this(baseNode, null, 0);
}
public TrackingTreeNodeDecorator(TreeNode baseNode, TrackingTreeNodeDecorator parent, int index) {
super(baseNode);
this.parent = parent;
this.index = index;
}
@Override
public String content() {
return decoratedNode.content();
}
@Override
protected TreeNode decorateChild(TreeNode childNode, int index) {
return new TrackingTreeNodeDecorator(childNode, this, index);
}
@Override
public boolean isDecorable() {
return false;
}
@Override
public int hashCode() {
int parentHashCode = parent != null ? parent.hashCode(): 0;
return (parentHashCode * 37) + index;
}
@Override
public boolean equals(Object other) {
if (!(other instanceof TrackingTreeNodeDecorator)) {
return false;
}
TrackingTreeNodeDecorator otherTrackingTreeNodeDecorator = (TrackingTreeNodeDecorator) other;
TrackingTreeNodeDecorator otherParent = otherTrackingTreeNodeDecorator.parent;
if (this == otherTrackingTreeNodeDecorator) {
return true;
} else if (parent == null) {
if (otherParent != null) {
return false;
}
} else if (otherParent == null || !parent.equals(otherParent)) {
return false;
}
return index == otherTrackingTreeNodeDecorator.index;
}
}
| 4
| 4
| 2
|
mixed
|
--- a/src/main/java/hu/webarticum/treeprinter/decorator/TrackingTreeNodeDecorator.java
+++ b/src/main/java/hu/webarticum/treeprinter/decorator/TrackingTreeNodeDecorator.java
@@ -49,6 +49,6 @@
- TrackingTreeNodeDecorator otherReferenceTreeNode = (TrackingTreeNodeDecorator) other;
- TrackingTreeNodeDecorator otherParent = otherReferenceTreeNode.parent;
+ TrackingTreeNodeDecorator otherTrackingTreeNodeDecorator = (TrackingTreeNodeDecorator) other;
+ TrackingTreeNodeDecorator otherParent = otherTrackingTreeNodeDecorator.parent;
- if (this == otherReferenceTreeNode) {
+ if (this == otherTrackingTreeNodeDecorator) {
return true;
@@ -62,3 +62,3 @@
- return index == otherReferenceTreeNode.index;
+ return index == otherTrackingTreeNodeDecorator.index;
}
|
--- a/src/main/java/hu/webarticum/treeprinter/decorator/TrackingTreeNodeDecorator.java
+++ b/src/main/java/hu/webarticum/treeprinter/decorator/TrackingTreeNodeDecorator.java
@@ ... @@
- TrackingTreeNodeDecorator otherReferenceTreeNode = (TrackingTreeNodeDecorator) other;
- TrackingTreeNodeDecorator otherParent = otherReferenceTreeNode.parent;
+ TrackingTreeNodeDecorator otherTrackingTreeNodeDecorator = (TrackingTreeNodeDecorator) other;
+ TrackingTreeNodeDecorator otherParent = otherTrackingTreeNodeDecorator.parent;
- if (this == otherReferenceTreeNode) {
+ if (this == otherTrackingTreeNodeDecorator) {
return true;
@@ ... @@
- return index == otherReferenceTreeNode.index;
+ return index == otherTrackingTreeNodeDecorator.index;
}
|
--- a/src/main/java/hu/webarticum/treeprinter/decorator/TrackingTreeNodeDecorator.java
+++ b/src/main/java/hu/webarticum/treeprinter/decorator/TrackingTreeNodeDecorator.java
@@ -49,6 +49,6 @@
CON
DEL TrackingTreeNodeDecorator otherReferenceTreeNode = (TrackingTreeNodeDecorator) other;
DEL TrackingTreeNodeDecorator otherParent = otherReferenceTreeNode.parent;
ADD TrackingTreeNodeDecorator otherTrackingTreeNodeDecorator = (TrackingTreeNodeDecorator) other;
ADD TrackingTreeNodeDecorator otherParent = otherTrackingTreeNodeDecorator.parent;
CON
DEL if (this == otherReferenceTreeNode) {
ADD if (this == otherTrackingTreeNodeDecorator) {
CON return true;
@@ -62,3 +62,3 @@
CON
DEL return index == otherReferenceTreeNode.index;
ADD return index == otherTrackingTreeNodeDecorator.index;
CON }
|
<<<<<<< SEARCH
}
TrackingTreeNodeDecorator otherReferenceTreeNode = (TrackingTreeNodeDecorator) other;
TrackingTreeNodeDecorator otherParent = otherReferenceTreeNode.parent;
if (this == otherReferenceTreeNode) {
return true;
} else if (parent == null) {
=======
}
TrackingTreeNodeDecorator otherTrackingTreeNodeDecorator = (TrackingTreeNodeDecorator) other;
TrackingTreeNodeDecorator otherParent = otherTrackingTreeNodeDecorator.parent;
if (this == otherTrackingTreeNodeDecorator) {
return true;
} else if (parent == null) {
>>>>>>> REPLACE
<<<<<<< SEARCH
}
return index == otherReferenceTreeNode.index;
}
=======
}
return index == otherTrackingTreeNodeDecorator.index;
}
>>>>>>> REPLACE
|
superjohan/pfm
|
ed523bb7a59c9bc681a7608dcc560f2e0e77b3a3
|
app/src/main/java/com/aerodeko/pfm/MainActivity.kt
|
kotlin
|
mit
|
Test commit for Slack integrations.
|
package com.aerodeko.pfm
import android.os.Bundle
import android.support.design.widget.FloatingActionButton
import android.support.design.widget.Snackbar
import android.support.v7.app.AppCompatActivity
import android.support.v7.widget.Toolbar
import android.view.View
import android.view.Menu
import android.view.MenuItem
class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val toolbar = findViewById(R.id.toolbar) as Toolbar
setSupportActionBar(toolbar)
val fab = findViewById(R.id.fab) as FloatingActionButton
fab.setOnClickListener { view ->
Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null).show()
}
}
override fun onCreateOptionsMenu(menu: Menu): Boolean {
// Inflate the menu; this adds items to the action bar if it is present.
menuInflater.inflate(R.menu.menu_main, menu)
return true
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
val id = item.itemId
if (id == R.id.action_settings) {
return true
}
return super.onOptionsItemSelected(item)
}
}
|
package com.aerodeko.pfm
import android.os.Bundle
import android.support.design.widget.FloatingActionButton
import android.support.design.widget.Snackbar
import android.support.v7.app.AppCompatActivity
import android.support.v7.widget.Toolbar
import android.util.Log
import android.view.View
import android.view.Menu
import android.view.MenuItem
class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val toolbar = findViewById(R.id.toolbar) as Toolbar
setSupportActionBar(toolbar)
val fab = findViewById(R.id.fab) as FloatingActionButton
fab.setOnClickListener { view ->
Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null).show()
}
// FIXME: remove this shit
Log.d("test", "test")
}
override fun onCreateOptionsMenu(menu: Menu): Boolean {
// Inflate the menu; this adds items to the action bar if it is present.
menuInflater.inflate(R.menu.menu_main, menu)
return true
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
val id = item.itemId
if (id == R.id.action_settings) {
return true
}
return super.onOptionsItemSelected(item)
}
}
| 4
| 0
| 2
|
add_only
|
--- a/app/src/main/java/com/aerodeko/pfm/MainActivity.kt
+++ b/app/src/main/java/com/aerodeko/pfm/MainActivity.kt
@@ -7,2 +7,3 @@
import android.support.v7.widget.Toolbar
+import android.util.Log
import android.view.View
@@ -23,2 +24,5 @@
}
+
+ // FIXME: remove this shit
+ Log.d("test", "test")
}
|
--- a/app/src/main/java/com/aerodeko/pfm/MainActivity.kt
+++ b/app/src/main/java/com/aerodeko/pfm/MainActivity.kt
@@ ... @@
import android.support.v7.widget.Toolbar
+import android.util.Log
import android.view.View
@@ ... @@
}
+
+ // FIXME: remove this shit
+ Log.d("test", "test")
}
|
--- a/app/src/main/java/com/aerodeko/pfm/MainActivity.kt
+++ b/app/src/main/java/com/aerodeko/pfm/MainActivity.kt
@@ -7,2 +7,3 @@
CON import android.support.v7.widget.Toolbar
ADD import android.util.Log
CON import android.view.View
@@ -23,2 +24,5 @@
CON }
ADD
ADD // FIXME: remove this shit
ADD Log.d("test", "test")
CON }
|
<<<<<<< SEARCH
import android.support.v7.app.AppCompatActivity
import android.support.v7.widget.Toolbar
import android.view.View
import android.view.Menu
=======
import android.support.v7.app.AppCompatActivity
import android.support.v7.widget.Toolbar
import android.util.Log
import android.view.View
import android.view.Menu
>>>>>>> REPLACE
<<<<<<< SEARCH
.setAction("Action", null).show()
}
}
=======
.setAction("Action", null).show()
}
// FIXME: remove this shit
Log.d("test", "test")
}
>>>>>>> REPLACE
|
sirixdb/sirix
|
9107bad36365b4d1d87306654e340366d181c588
|
bundles/sirix-rest-api/src/main/kotlin/org/sirix/rest/crud/CreateMultipleResources.kt
|
kotlin
|
bsd-3-clause
|
Add refactoring and performance tweak
|
package org.sirix.rest.crud
import io.vertx.ext.web.Route
import io.vertx.ext.web.RoutingContext
import org.sirix.rest.crud.json.JsonCreate
import org.sirix.rest.crud.xml.XmlCreate
import java.nio.file.Path
class CreateMultipleResources(private val location: Path) {
suspend fun handle(ctx: RoutingContext): Route {
val fileUploads = ctx.fileUploads()
var xmlCount = 0
var jsonCount = 0
fileUploads.forEach { fileUpload ->
when (fileUpload.contentType()) {
"application/xml" -> xmlCount++
"application/json" -> jsonCount++
}
}
if (xmlCount > 0 && xmlCount != fileUploads.size) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
} else if (jsonCount > 0 && jsonCount != fileUploads.size) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
}
if (ctx.failed()) return ctx.currentRoute()
if (xmlCount > 0) XmlCreate(location, true).handle(ctx)
else if (jsonCount > 0) JsonCreate(location, true).handle(ctx)
return ctx.currentRoute()
}
}
|
package org.sirix.rest.crud
import io.vertx.ext.web.Route
import io.vertx.ext.web.RoutingContext
import org.sirix.rest.crud.json.JsonCreate
import org.sirix.rest.crud.xml.XmlCreate
import java.nio.file.Path
class CreateMultipleResources(private val location: Path) {
suspend fun handle(ctx: RoutingContext): Route {
val fileUploads = ctx.fileUploads()
var isXmlFiles = false
var isJsonFiles = false
fileUploads.forEach { fileUpload ->
when (fileUpload.contentType()) {
"application/xml" -> {
if (isJsonFiles) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
return ctx.currentRoute()
}
isXmlFiles = true
}
"application/json" -> {
if (isXmlFiles) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
return ctx.currentRoute()
}
isJsonFiles = true
}
}
}
if (isXmlFiles) XmlCreate(location, true).handle(ctx)
else if (isJsonFiles) JsonCreate(location, true).handle(ctx)
return ctx.currentRoute()
}
}
| 20
| 14
| 3
|
mixed
|
--- a/bundles/sirix-rest-api/src/main/kotlin/org/sirix/rest/crud/CreateMultipleResources.kt
+++ b/bundles/sirix-rest-api/src/main/kotlin/org/sirix/rest/crud/CreateMultipleResources.kt
@@ -11,4 +11,4 @@
val fileUploads = ctx.fileUploads()
- var xmlCount = 0
- var jsonCount = 0
+ var isXmlFiles = false
+ var isJsonFiles = false
@@ -16,4 +16,18 @@
when (fileUpload.contentType()) {
- "application/xml" -> xmlCount++
- "application/json" -> jsonCount++
+ "application/xml" -> {
+ if (isJsonFiles) {
+ ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
+ return ctx.currentRoute()
+ }
+
+ isXmlFiles = true
+ }
+ "application/json" -> {
+ if (isXmlFiles) {
+ ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
+ return ctx.currentRoute()
+ }
+
+ isJsonFiles = true
+ }
}
@@ -21,12 +35,4 @@
- if (xmlCount > 0 && xmlCount != fileUploads.size) {
- ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
- } else if (jsonCount > 0 && jsonCount != fileUploads.size) {
- ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
- }
-
- if (ctx.failed()) return ctx.currentRoute()
-
- if (xmlCount > 0) XmlCreate(location, true).handle(ctx)
- else if (jsonCount > 0) JsonCreate(location, true).handle(ctx)
+ if (isXmlFiles) XmlCreate(location, true).handle(ctx)
+ else if (isJsonFiles) JsonCreate(location, true).handle(ctx)
|
--- a/bundles/sirix-rest-api/src/main/kotlin/org/sirix/rest/crud/CreateMultipleResources.kt
+++ b/bundles/sirix-rest-api/src/main/kotlin/org/sirix/rest/crud/CreateMultipleResources.kt
@@ ... @@
val fileUploads = ctx.fileUploads()
- var xmlCount = 0
- var jsonCount = 0
+ var isXmlFiles = false
+ var isJsonFiles = false
@@ ... @@
when (fileUpload.contentType()) {
- "application/xml" -> xmlCount++
- "application/json" -> jsonCount++
+ "application/xml" -> {
+ if (isJsonFiles) {
+ ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
+ return ctx.currentRoute()
+ }
+
+ isXmlFiles = true
+ }
+ "application/json" -> {
+ if (isXmlFiles) {
+ ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
+ return ctx.currentRoute()
+ }
+
+ isJsonFiles = true
+ }
}
@@ ... @@
- if (xmlCount > 0 && xmlCount != fileUploads.size) {
- ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
- } else if (jsonCount > 0 && jsonCount != fileUploads.size) {
- ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
- }
-
- if (ctx.failed()) return ctx.currentRoute()
-
- if (xmlCount > 0) XmlCreate(location, true).handle(ctx)
- else if (jsonCount > 0) JsonCreate(location, true).handle(ctx)
+ if (isXmlFiles) XmlCreate(location, true).handle(ctx)
+ else if (isJsonFiles) JsonCreate(location, true).handle(ctx)
|
--- a/bundles/sirix-rest-api/src/main/kotlin/org/sirix/rest/crud/CreateMultipleResources.kt
+++ b/bundles/sirix-rest-api/src/main/kotlin/org/sirix/rest/crud/CreateMultipleResources.kt
@@ -11,4 +11,4 @@
CON val fileUploads = ctx.fileUploads()
DEL var xmlCount = 0
DEL var jsonCount = 0
ADD var isXmlFiles = false
ADD var isJsonFiles = false
CON
@@ -16,4 +16,18 @@
CON when (fileUpload.contentType()) {
DEL "application/xml" -> xmlCount++
DEL "application/json" -> jsonCount++
ADD "application/xml" -> {
ADD if (isJsonFiles) {
ADD ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
ADD return ctx.currentRoute()
ADD }
ADD
ADD isXmlFiles = true
ADD }
ADD "application/json" -> {
ADD if (isXmlFiles) {
ADD ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
ADD return ctx.currentRoute()
ADD }
ADD
ADD isJsonFiles = true
ADD }
CON }
@@ -21,12 +35,4 @@
CON
DEL if (xmlCount > 0 && xmlCount != fileUploads.size) {
DEL ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
DEL } else if (jsonCount > 0 && jsonCount != fileUploads.size) {
DEL ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
DEL }
DEL
DEL if (ctx.failed()) return ctx.currentRoute()
DEL
DEL if (xmlCount > 0) XmlCreate(location, true).handle(ctx)
DEL else if (jsonCount > 0) JsonCreate(location, true).handle(ctx)
ADD if (isXmlFiles) XmlCreate(location, true).handle(ctx)
ADD else if (isJsonFiles) JsonCreate(location, true).handle(ctx)
CON
|
<<<<<<< SEARCH
suspend fun handle(ctx: RoutingContext): Route {
val fileUploads = ctx.fileUploads()
var xmlCount = 0
var jsonCount = 0
fileUploads.forEach { fileUpload ->
when (fileUpload.contentType()) {
"application/xml" -> xmlCount++
"application/json" -> jsonCount++
}
}
if (xmlCount > 0 && xmlCount != fileUploads.size) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
} else if (jsonCount > 0 && jsonCount != fileUploads.size) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
}
if (ctx.failed()) return ctx.currentRoute()
if (xmlCount > 0) XmlCreate(location, true).handle(ctx)
else if (jsonCount > 0) JsonCreate(location, true).handle(ctx)
return ctx.currentRoute()
=======
suspend fun handle(ctx: RoutingContext): Route {
val fileUploads = ctx.fileUploads()
var isXmlFiles = false
var isJsonFiles = false
fileUploads.forEach { fileUpload ->
when (fileUpload.contentType()) {
"application/xml" -> {
if (isJsonFiles) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
return ctx.currentRoute()
}
isXmlFiles = true
}
"application/json" -> {
if (isXmlFiles) {
ctx.fail(IllegalArgumentException("All uploaded files must be either of type XML or JSON."))
return ctx.currentRoute()
}
isJsonFiles = true
}
}
}
if (isXmlFiles) XmlCreate(location, true).handle(ctx)
else if (isJsonFiles) JsonCreate(location, true).handle(ctx)
return ctx.currentRoute()
>>>>>>> REPLACE
|
CS2103AUG2016-W14-C4/main
|
e3c44e4034a8626008b027e569b4d9db2eae5a37
|
src/main/java/seedu/taskitty/model/task/Name.java
|
java
|
mit
|
Improve task name by allowing wider input range
|
package seedu.taskitty.model.task;
import seedu.taskitty.commons.exceptions.IllegalValueException;
/**
* Represents a Task's name in the task manager.
* Guarantees: immutable; is valid as declared in {@link #isValidName(String)}
*/
public class Name {
public static final String MESSAGE_NAME_CONSTRAINTS = "Task names should be spaces or alphanumeric characters";
public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum} ]+";
public final String fullName;
/**
* Validates given name.
*
* @throws IllegalValueException if given name string is invalid.
*/
public Name(String name) throws IllegalValueException {
assert name != null;
String trimName = name.trim();
if (!isValidName(trimName)) {
throw new IllegalValueException(MESSAGE_NAME_CONSTRAINTS);
}
this.fullName = trimName;
}
/**
* Returns true if a given string is a valid person name.
*/
public static boolean isValidName(String test) {
return test.matches(NAME_VALIDATION_REGEX_FORMAT);
}
@Override
public String toString() {
return fullName;
}
@Override
public boolean equals(Object other) {
return other == this // short circuit if same object
|| (other instanceof Name // instanceof handles nulls
&& this.fullName.equals(((Name) other).fullName)); // state check
}
@Override
public int hashCode() {
return fullName.hashCode();
}
}
|
package seedu.taskitty.model.task;
import seedu.taskitty.commons.exceptions.IllegalValueException;
/**
* Represents a Task's name in the task manager.
* Guarantees: immutable; is valid as declared in {@link #isValidName(String)}
*/
public class Name {
public static final String MESSAGE_NAME_CONSTRAINTS =
"Task names should be alphabets, numbers, spaces and common punctuations, Meow!";
public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum}!@$%&(),.? ]+";
public final String fullName;
/**
* Validates given name.
*
* @throws IllegalValueException if given name string is invalid.
*/
public Name(String name) throws IllegalValueException {
assert name != null;
String trimName = name.trim();
if (!isValidName(trimName)) {
throw new IllegalValueException(MESSAGE_NAME_CONSTRAINTS);
}
this.fullName = trimName;
}
/**
* Returns true if a given string is a valid person name.
*/
public static boolean isValidName(String test) {
return test.matches(NAME_VALIDATION_REGEX_FORMAT);
}
@Override
public String toString() {
return fullName;
}
@Override
public boolean equals(Object other) {
return other == this // short circuit if same object
|| (other instanceof Name // instanceof handles nulls
&& this.fullName.equals(((Name) other).fullName)); // state check
}
@Override
public int hashCode() {
return fullName.hashCode();
}
}
| 3
| 2
| 1
|
mixed
|
--- a/src/main/java/seedu/taskitty/model/task/Name.java
+++ b/src/main/java/seedu/taskitty/model/task/Name.java
@@ -10,4 +10,5 @@
- public static final String MESSAGE_NAME_CONSTRAINTS = "Task names should be spaces or alphanumeric characters";
- public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum} ]+";
+ public static final String MESSAGE_NAME_CONSTRAINTS =
+ "Task names should be alphabets, numbers, spaces and common punctuations, Meow!";
+ public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum}!@$%&(),.? ]+";
|
--- a/src/main/java/seedu/taskitty/model/task/Name.java
+++ b/src/main/java/seedu/taskitty/model/task/Name.java
@@ ... @@
- public static final String MESSAGE_NAME_CONSTRAINTS = "Task names should be spaces or alphanumeric characters";
- public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum} ]+";
+ public static final String MESSAGE_NAME_CONSTRAINTS =
+ "Task names should be alphabets, numbers, spaces and common punctuations, Meow!";
+ public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum}!@$%&(),.? ]+";
|
--- a/src/main/java/seedu/taskitty/model/task/Name.java
+++ b/src/main/java/seedu/taskitty/model/task/Name.java
@@ -10,4 +10,5 @@
CON
DEL public static final String MESSAGE_NAME_CONSTRAINTS = "Task names should be spaces or alphanumeric characters";
DEL public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum} ]+";
ADD public static final String MESSAGE_NAME_CONSTRAINTS =
ADD "Task names should be alphabets, numbers, spaces and common punctuations, Meow!";
ADD public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum}!@$%&(),.? ]+";
CON
|
<<<<<<< SEARCH
public class Name {
public static final String MESSAGE_NAME_CONSTRAINTS = "Task names should be spaces or alphanumeric characters";
public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum} ]+";
public final String fullName;
=======
public class Name {
public static final String MESSAGE_NAME_CONSTRAINTS =
"Task names should be alphabets, numbers, spaces and common punctuations, Meow!";
public static final String NAME_VALIDATION_REGEX_FORMAT = "[\\p{Alnum}!@$%&(),.? ]+";
public final String fullName;
>>>>>>> REPLACE
|
DesertBot/DesertBot
|
81b6a138c476084f9ddd6063f31d3efd0ba6e2cf
|
start.py
|
python
|
mit
|
Make the logging level configurable
|
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import sys
from twisted.internet import reactor
from desertbot.config import Config, ConfigError
from desertbot.factory import DesertBotFactory
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='An IRC bot written in Python.')
parser.add_argument('-c', '--config',
help='the config file to read from',
type=str, required=True)
cmdArgs = parser.parse_args()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Set up logging for stdout on the root 'desertbot' logger
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
rootLogger.setLevel(logging.INFO) # TODO change this from config value once it's loaded
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
streamHandler = logging.StreamHandler(stream=sys.stdout)
streamHandler.setFormatter(logFormatter)
rootLogger.addHandler(streamHandler)
config = Config(cmdArgs.config)
try:
config.loadConfig()
except ConfigError:
rootLogger.exception("Failed to load configuration file {}".format(cmdArgs.config))
else:
factory = DesertBotFactory(config)
reactor.run()
|
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import sys
from twisted.internet import reactor
from desertbot.config import Config, ConfigError
from desertbot.factory import DesertBotFactory
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='An IRC bot written in Python.')
parser.add_argument('-c', '--config',
help='the config file to read from',
type=str, required=True)
parser.add_argument('-l', '--loglevel',
help='the logging level (default INFO)',
type=str, default='INFO')
cmdArgs = parser.parse_args()
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Set up logging for stdout on the root 'desertbot' logger
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
if isinstance(numericLevel, int):
rootLogger.setLevel(numericLevel)
else:
raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
streamHandler = logging.StreamHandler(stream=sys.stdout)
streamHandler.setFormatter(logFormatter)
rootLogger.addHandler(streamHandler)
config = Config(cmdArgs.config)
try:
config.loadConfig()
except ConfigError:
rootLogger.exception("Failed to load configuration file {}".format(cmdArgs.config))
else:
factory = DesertBotFactory(config)
reactor.run()
| 8
| 1
| 2
|
mixed
|
--- a/start.py
+++ b/start.py
@@ -17,2 +17,5 @@
type=str, required=True)
+ parser.add_argument('-l', '--loglevel',
+ help='the logging level (default INFO)',
+ type=str, default='INFO')
cmdArgs = parser.parse_args()
@@ -24,3 +27,7 @@
rootLogger = logging.getLogger('desertbot')
- rootLogger.setLevel(logging.INFO) # TODO change this from config value once it's loaded
+ numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
+ if isinstance(numericLevel, int):
+ rootLogger.setLevel(numericLevel)
+ else:
+ raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
|
--- a/start.py
+++ b/start.py
@@ ... @@
type=str, required=True)
+ parser.add_argument('-l', '--loglevel',
+ help='the logging level (default INFO)',
+ type=str, default='INFO')
cmdArgs = parser.parse_args()
@@ ... @@
rootLogger = logging.getLogger('desertbot')
- rootLogger.setLevel(logging.INFO) # TODO change this from config value once it's loaded
+ numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
+ if isinstance(numericLevel, int):
+ rootLogger.setLevel(numericLevel)
+ else:
+ raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
|
--- a/start.py
+++ b/start.py
@@ -17,2 +17,5 @@
CON type=str, required=True)
ADD parser.add_argument('-l', '--loglevel',
ADD help='the logging level (default INFO)',
ADD type=str, default='INFO')
CON cmdArgs = parser.parse_args()
@@ -24,3 +27,7 @@
CON rootLogger = logging.getLogger('desertbot')
DEL rootLogger.setLevel(logging.INFO) # TODO change this from config value once it's loaded
ADD numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
ADD if isinstance(numericLevel, int):
ADD rootLogger.setLevel(numericLevel)
ADD else:
ADD raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
CON
|
<<<<<<< SEARCH
help='the config file to read from',
type=str, required=True)
cmdArgs = parser.parse_args()
=======
help='the config file to read from',
type=str, required=True)
parser.add_argument('-l', '--loglevel',
help='the logging level (default INFO)',
type=str, default='INFO')
cmdArgs = parser.parse_args()
>>>>>>> REPLACE
<<<<<<< SEARCH
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
rootLogger.setLevel(logging.INFO) # TODO change this from config value once it's loaded
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
=======
# Modules can then just add more handlers to the root logger to capture all logs to files in various ways
rootLogger = logging.getLogger('desertbot')
numericLevel = getattr(logging, cmdArgs.loglevel.upper(), None)
if isinstance(numericLevel, int):
rootLogger.setLevel(numericLevel)
else:
raise ValueError('Invalid log level {}'.format(cmdArgs.loglevel))
logFormatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%H:%M:%S')
>>>>>>> REPLACE
|
gdit-cnd/RAPID
|
6858e4a2e2047c906a3b8f69b7cd7b04a0cbf666
|
pivoteer/writer/censys.py
|
python
|
mit
|
Resolve issues with exporting empty dataset for certificate list
|
"""
Classes and functions for writing IndicatorRecord objects with a record type of "CE" (Censys Record)
"""
from pivoteer.writer.core import CsvWriter
class CensysCsvWriter(CsvWriter):
"""
A CsvWriter implementation for IndicatorRecords with a record type of "CE" (Censys Record)
"""
def __init__(self, writer):
"""
Create a new CsvWriter for Censys Records using the given writer.
:param writer: The writer
"""
super(CensysCsvWriter, self).__init__(writer)
def create_title_rows(self, indicator, records):
yield ["Certificate Search Results"]
def create_header(self):
return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"]
def create_rows(self, record):
info = record["info"]
records = info["records"]
for record in records:
parsed = record["parsed"]
subject = parsed["subject_dn"]
issuer = parsed["issuer_dn"]
sha256 = parsed["fingerprint_sha256"]
validity = parsed["validity"]
start = validity["start"]
end = validity["end"]
yield [subject, issuer, sha256, start, end]
|
"""
Classes and functions for writing IndicatorRecord objects with a record type of "CE" (Censys Record)
"""
from pivoteer.writer.core import CsvWriter
class CensysCsvWriter(CsvWriter):
"""
A CsvWriter implementation for IndicatorRecords with a record type of "CE" (Censys Record)
"""
def __init__(self, writer):
"""
Create a new CsvWriter for Censys Records using the given writer.
:param writer: The writer
"""
super(CensysCsvWriter, self).__init__(writer)
def create_title_rows(self, indicator, records):
yield ["Certificate Search Results"]
def create_header(self):
return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"]
def create_rows(self, record):
if (record is not None and len(record) > 0):
info = record["info"]
records = info["records"]
for record in records:
parsed = record["parsed"]
subject = parsed["subject_dn"]
issuer = parsed["issuer_dn"]
sha256 = parsed["fingerprint_sha256"]
validity = parsed["validity"]
start = validity["start"]
end = validity["end"]
yield [subject, issuer, sha256, start, end]
| 13
| 11
| 2
|
mixed
|
--- a/pivoteer/writer/censys.py
+++ b/pivoteer/writer/censys.py
@@ -20,2 +20,3 @@
+
def create_title_rows(self, indicator, records):
@@ -27,12 +28,13 @@
def create_rows(self, record):
- info = record["info"]
- records = info["records"]
- for record in records:
- parsed = record["parsed"]
- subject = parsed["subject_dn"]
- issuer = parsed["issuer_dn"]
- sha256 = parsed["fingerprint_sha256"]
- validity = parsed["validity"]
- start = validity["start"]
- end = validity["end"]
- yield [subject, issuer, sha256, start, end]
+ if (record is not None and len(record) > 0):
+ info = record["info"]
+ records = info["records"]
+ for record in records:
+ parsed = record["parsed"]
+ subject = parsed["subject_dn"]
+ issuer = parsed["issuer_dn"]
+ sha256 = parsed["fingerprint_sha256"]
+ validity = parsed["validity"]
+ start = validity["start"]
+ end = validity["end"]
+ yield [subject, issuer, sha256, start, end]
|
--- a/pivoteer/writer/censys.py
+++ b/pivoteer/writer/censys.py
@@ ... @@
+
def create_title_rows(self, indicator, records):
@@ ... @@
def create_rows(self, record):
- info = record["info"]
- records = info["records"]
- for record in records:
- parsed = record["parsed"]
- subject = parsed["subject_dn"]
- issuer = parsed["issuer_dn"]
- sha256 = parsed["fingerprint_sha256"]
- validity = parsed["validity"]
- start = validity["start"]
- end = validity["end"]
- yield [subject, issuer, sha256, start, end]
+ if (record is not None and len(record) > 0):
+ info = record["info"]
+ records = info["records"]
+ for record in records:
+ parsed = record["parsed"]
+ subject = parsed["subject_dn"]
+ issuer = parsed["issuer_dn"]
+ sha256 = parsed["fingerprint_sha256"]
+ validity = parsed["validity"]
+ start = validity["start"]
+ end = validity["end"]
+ yield [subject, issuer, sha256, start, end]
|
--- a/pivoteer/writer/censys.py
+++ b/pivoteer/writer/censys.py
@@ -20,2 +20,3 @@
CON
ADD
CON def create_title_rows(self, indicator, records):
@@ -27,12 +28,13 @@
CON def create_rows(self, record):
DEL info = record["info"]
DEL records = info["records"]
DEL for record in records:
DEL parsed = record["parsed"]
DEL subject = parsed["subject_dn"]
DEL issuer = parsed["issuer_dn"]
DEL sha256 = parsed["fingerprint_sha256"]
DEL validity = parsed["validity"]
DEL start = validity["start"]
DEL end = validity["end"]
DEL yield [subject, issuer, sha256, start, end]
ADD if (record is not None and len(record) > 0):
ADD info = record["info"]
ADD records = info["records"]
ADD for record in records:
ADD parsed = record["parsed"]
ADD subject = parsed["subject_dn"]
ADD issuer = parsed["issuer_dn"]
ADD sha256 = parsed["fingerprint_sha256"]
ADD validity = parsed["validity"]
ADD start = validity["start"]
ADD end = validity["end"]
ADD yield [subject, issuer, sha256, start, end]
|
<<<<<<< SEARCH
super(CensysCsvWriter, self).__init__(writer)
def create_title_rows(self, indicator, records):
yield ["Certificate Search Results"]
=======
super(CensysCsvWriter, self).__init__(writer)
def create_title_rows(self, indicator, records):
yield ["Certificate Search Results"]
>>>>>>> REPLACE
<<<<<<< SEARCH
def create_rows(self, record):
info = record["info"]
records = info["records"]
for record in records:
parsed = record["parsed"]
subject = parsed["subject_dn"]
issuer = parsed["issuer_dn"]
sha256 = parsed["fingerprint_sha256"]
validity = parsed["validity"]
start = validity["start"]
end = validity["end"]
yield [subject, issuer, sha256, start, end]
=======
def create_rows(self, record):
if (record is not None and len(record) > 0):
info = record["info"]
records = info["records"]
for record in records:
parsed = record["parsed"]
subject = parsed["subject_dn"]
issuer = parsed["issuer_dn"]
sha256 = parsed["fingerprint_sha256"]
validity = parsed["validity"]
start = validity["start"]
end = validity["end"]
yield [subject, issuer, sha256, start, end]
>>>>>>> REPLACE
|
bjpop/biotool
|
2acdacf6808c8d59448c69ae729d45fe8d8863aa
|
rust/src/main.rs
|
rust
|
mit
|
Use num_seqs == 1 to decide if min and max need initialisation
|
extern crate bio;
use std::io;
use std::cmp;
use bio::io::fasta;
fn main() {
let reader = fasta::Reader::new(io::stdin());
let mut num_seqs = 0;
let mut total = 0;
let mut max_len = 0;
let mut min_len = 0;
let mut this_len;
let mut first_seq = true;
println!("FILENAME\tTOTAL\tNUMSEQ\tMIN\tAVG\tMAX");
for next in reader.records() {
match next {
Ok(record) => {
num_seqs += 1;
this_len = record.seq().len();
total += this_len;
max_len = cmp::max(max_len, this_len);
if first_seq {
min_len = this_len;
first_seq = false;
}
else {
min_len = cmp::min(min_len, this_len);
}
},
Err(error) => println!("{}", error),
}
}
if num_seqs > 0 {
let average = ((total as f64) / (num_seqs as f64)).floor() as usize;
println!("{}\t{}\t{}\t{}\t{}", num_seqs, total, min_len, average, max_len);
}
else {
println!("0\t0\t-\t-\t-");
}
}
|
extern crate bio;
use std::io;
use std::cmp;
use bio::io::fasta;
fn main() {
let reader = fasta::Reader::new(io::stdin());
let mut num_seqs = 0;
let mut total = 0;
let mut max_len = 0;
let mut min_len = 0;
let mut this_len;
println!("FILENAME\tTOTAL\tNUMSEQ\tMIN\tAVG\tMAX");
for next in reader.records() {
match next {
Ok(record) => {
num_seqs += 1;
this_len = record.seq().len();
total += this_len;
if num_seqs == 1 {
max_len = this_len;
min_len = this_len;
}
else {
max_len = cmp::max(max_len, this_len);
min_len = cmp::min(min_len, this_len);
}
},
Err(error) => println!("{}", error),
}
}
if num_seqs > 0 {
let average = ((total as f64) / (num_seqs as f64)).floor() as usize;
println!("{}\t{}\t{}\t{}\t{}", num_seqs, total, min_len, average, max_len);
}
else {
println!("0\t0\t-\t-\t-");
}
}
| 3
| 4
| 2
|
mixed
|
--- a/rust/src/main.rs
+++ b/rust/src/main.rs
@@ -12,3 +12,2 @@
let mut this_len;
- let mut first_seq = true;
@@ -22,8 +21,8 @@
total += this_len;
- max_len = cmp::max(max_len, this_len);
- if first_seq {
+ if num_seqs == 1 {
+ max_len = this_len;
min_len = this_len;
- first_seq = false;
}
else {
+ max_len = cmp::max(max_len, this_len);
min_len = cmp::min(min_len, this_len);
|
--- a/rust/src/main.rs
+++ b/rust/src/main.rs
@@ ... @@
let mut this_len;
- let mut first_seq = true;
@@ ... @@
total += this_len;
- max_len = cmp::max(max_len, this_len);
- if first_seq {
+ if num_seqs == 1 {
+ max_len = this_len;
min_len = this_len;
- first_seq = false;
}
else {
+ max_len = cmp::max(max_len, this_len);
min_len = cmp::min(min_len, this_len);
|
--- a/rust/src/main.rs
+++ b/rust/src/main.rs
@@ -12,3 +12,2 @@
CON let mut this_len;
DEL let mut first_seq = true;
CON
@@ -22,8 +21,8 @@
CON total += this_len;
DEL max_len = cmp::max(max_len, this_len);
DEL if first_seq {
ADD if num_seqs == 1 {
ADD max_len = this_len;
CON min_len = this_len;
DEL first_seq = false;
CON }
CON else {
ADD max_len = cmp::max(max_len, this_len);
CON min_len = cmp::min(min_len, this_len);
|
<<<<<<< SEARCH
let mut min_len = 0;
let mut this_len;
let mut first_seq = true;
println!("FILENAME\tTOTAL\tNUMSEQ\tMIN\tAVG\tMAX");
=======
let mut min_len = 0;
let mut this_len;
println!("FILENAME\tTOTAL\tNUMSEQ\tMIN\tAVG\tMAX");
>>>>>>> REPLACE
<<<<<<< SEARCH
this_len = record.seq().len();
total += this_len;
max_len = cmp::max(max_len, this_len);
if first_seq {
min_len = this_len;
first_seq = false;
}
else {
min_len = cmp::min(min_len, this_len);
}
=======
this_len = record.seq().len();
total += this_len;
if num_seqs == 1 {
max_len = this_len;
min_len = this_len;
}
else {
max_len = cmp::max(max_len, this_len);
min_len = cmp::min(min_len, this_len);
}
>>>>>>> REPLACE
|
EmilStenstrom/nephele
|
f45b3e73b6258c99aed2bff2e7350f1c797ff849
|
providers/provider.py
|
python
|
mit
|
Remove support for Python 2.
|
import copy
import json
import requests
import html5lib
from application import APPLICATION as APP
# Be compatible with python 2 and 3
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector, timeout=60, cache=True):
html = self._http_get(url, timeout=timeout, cache=cache)
document = html5lib.parse(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
new_data = copy.copy(data)
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
new_data = new_data[item]
except (IndexError, KeyError):
return {}
return new_data
def parse_json(self, url, path=None, timeout=60, cache=True):
data = self._http_get(url, timeout=timeout, cache=cache)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
def urlencode(self, data):
return urlencode(data)
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60, cache=True):
base = requests if not cache else APP.setting("WEBCACHE")
response = base.get(url, timeout=timeout)
return response.text
|
import copy
import json
from urllib.parse import urlencode
import html5lib
import requests
from application import APPLICATION as APP
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector, timeout=60, cache=True):
html = self._http_get(url, timeout=timeout, cache=cache)
document = html5lib.parse(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
new_data = copy.copy(data)
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
new_data = new_data[item]
except (IndexError, KeyError):
return {}
return new_data
def parse_json(self, url, path=None, timeout=60, cache=True):
data = self._http_get(url, timeout=timeout, cache=cache)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
def urlencode(self, data):
return urlencode(data)
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60, cache=True):
base = requests if not cache else APP.setting("WEBCACHE")
response = base.get(url, timeout=timeout)
return response.text
| 3
| 6
| 1
|
mixed
|
--- a/providers/provider.py
+++ b/providers/provider.py
@@ -2,11 +2,8 @@
import json
+from urllib.parse import urlencode
+
+import html5lib
import requests
-import html5lib
from application import APPLICATION as APP
-# Be compatible with python 2 and 3
-try:
- from urllib import urlencode
-except ImportError:
- from urllib.parse import urlencode
|
--- a/providers/provider.py
+++ b/providers/provider.py
@@ ... @@
import json
+from urllib.parse import urlencode
+
+import html5lib
import requests
-import html5lib
from application import APPLICATION as APP
-# Be compatible with python 2 and 3
-try:
- from urllib import urlencode
-except ImportError:
- from urllib.parse import urlencode
|
--- a/providers/provider.py
+++ b/providers/provider.py
@@ -2,11 +2,8 @@
CON import json
ADD from urllib.parse import urlencode
ADD
ADD import html5lib
CON import requests
DEL import html5lib
CON from application import APPLICATION as APP
CON
DEL # Be compatible with python 2 and 3
DEL try:
DEL from urllib import urlencode
DEL except ImportError:
DEL from urllib.parse import urlencode
CON
|
<<<<<<< SEARCH
import copy
import json
import requests
import html5lib
from application import APPLICATION as APP
# Be compatible with python 2 and 3
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
class BaseProvider(object):
=======
import copy
import json
from urllib.parse import urlencode
import html5lib
import requests
from application import APPLICATION as APP
class BaseProvider(object):
>>>>>>> REPLACE
|
bombless/rust-docs-chinese
|
bac68e4af3592dfcd6867fb8a73d2999daff744f
|
src/test/run-pass/lib-list.rs
|
rust
|
apache-2.0
|
stdlib: Add regression tests for std::list
|
use std;
import std::list;
import std::list::car;
import std::list::cdr;
import std::list::from_vec;
fn test_from_vec() {
auto l = from_vec([0, 1, 2]);
assert (car(l) == 0);
assert (car(cdr(l)) == 1);
assert (car(cdr(cdr(l))) == 2);
}
fn main() {
test_from_vec();
}
|
use std;
import std::list;
import std::list::car;
import std::list::cdr;
import std::list::from_vec;
import std::option;
fn test_from_vec() {
auto l = from_vec([0, 1, 2]);
assert (car(l) == 0);
assert (car(cdr(l)) == 1);
assert (car(cdr(cdr(l))) == 2);
}
fn test_foldl() {
auto l = from_vec([0, 1, 2, 3, 4]);
fn add (&int a, &uint b) -> uint {
ret (a as uint) + b;
}
auto res = list::foldl(l, 0u, add);
assert (res == 10u);
}
fn test_find_success() {
auto l = from_vec([0, 1, 2]);
fn match (&int i) -> option::t[int] {
ret if (i == 2) {
option::some(i)
} else {
option::none[int]
};
}
auto res = list::find(l, match);
assert (res == option::some(2));
}
fn test_find_fail() {
auto l = from_vec([0, 1, 2]);
fn match (&int i) -> option::t[int] {
ret option::none[int];
}
auto res = list::find(l, match);
assert (res == option::none[int]);
}
fn test_length() {
auto l = from_vec([0, 1, 2]);
assert (list::length(l) == 3u);
}
fn main() {
test_from_vec();
test_foldl();
test_find_success();
test_find_fail();
test_length();
}
| 41
| 0
| 2
|
add_only
|
--- a/src/test/run-pass/lib-list.rs
+++ b/src/test/run-pass/lib-list.rs
@@ -5,2 +5,3 @@
import std::list::from_vec;
+import std::option;
@@ -13,4 +14,44 @@
+fn test_foldl() {
+ auto l = from_vec([0, 1, 2, 3, 4]);
+ fn add (&int a, &uint b) -> uint {
+ ret (a as uint) + b;
+ }
+ auto res = list::foldl(l, 0u, add);
+ assert (res == 10u);
+}
+
+fn test_find_success() {
+ auto l = from_vec([0, 1, 2]);
+ fn match (&int i) -> option::t[int] {
+ ret if (i == 2) {
+ option::some(i)
+ } else {
+ option::none[int]
+ };
+ }
+ auto res = list::find(l, match);
+ assert (res == option::some(2));
+}
+
+fn test_find_fail() {
+ auto l = from_vec([0, 1, 2]);
+ fn match (&int i) -> option::t[int] {
+ ret option::none[int];
+ }
+ auto res = list::find(l, match);
+ assert (res == option::none[int]);
+}
+
+fn test_length() {
+ auto l = from_vec([0, 1, 2]);
+ assert (list::length(l) == 3u);
+}
+
fn main() {
test_from_vec();
+ test_foldl();
+ test_find_success();
+ test_find_fail();
+ test_length();
}
|
--- a/src/test/run-pass/lib-list.rs
+++ b/src/test/run-pass/lib-list.rs
@@ ... @@
import std::list::from_vec;
+import std::option;
@@ ... @@
+fn test_foldl() {
+ auto l = from_vec([0, 1, 2, 3, 4]);
+ fn add (&int a, &uint b) -> uint {
+ ret (a as uint) + b;
+ }
+ auto res = list::foldl(l, 0u, add);
+ assert (res == 10u);
+}
+
+fn test_find_success() {
+ auto l = from_vec([0, 1, 2]);
+ fn match (&int i) -> option::t[int] {
+ ret if (i == 2) {
+ option::some(i)
+ } else {
+ option::none[int]
+ };
+ }
+ auto res = list::find(l, match);
+ assert (res == option::some(2));
+}
+
+fn test_find_fail() {
+ auto l = from_vec([0, 1, 2]);
+ fn match (&int i) -> option::t[int] {
+ ret option::none[int];
+ }
+ auto res = list::find(l, match);
+ assert (res == option::none[int]);
+}
+
+fn test_length() {
+ auto l = from_vec([0, 1, 2]);
+ assert (list::length(l) == 3u);
+}
+
fn main() {
test_from_vec();
+ test_foldl();
+ test_find_success();
+ test_find_fail();
+ test_length();
}
|
--- a/src/test/run-pass/lib-list.rs
+++ b/src/test/run-pass/lib-list.rs
@@ -5,2 +5,3 @@
CON import std::list::from_vec;
ADD import std::option;
CON
@@ -13,4 +14,44 @@
CON
ADD fn test_foldl() {
ADD auto l = from_vec([0, 1, 2, 3, 4]);
ADD fn add (&int a, &uint b) -> uint {
ADD ret (a as uint) + b;
ADD }
ADD auto res = list::foldl(l, 0u, add);
ADD assert (res == 10u);
ADD }
ADD
ADD fn test_find_success() {
ADD auto l = from_vec([0, 1, 2]);
ADD fn match (&int i) -> option::t[int] {
ADD ret if (i == 2) {
ADD option::some(i)
ADD } else {
ADD option::none[int]
ADD };
ADD }
ADD auto res = list::find(l, match);
ADD assert (res == option::some(2));
ADD }
ADD
ADD fn test_find_fail() {
ADD auto l = from_vec([0, 1, 2]);
ADD fn match (&int i) -> option::t[int] {
ADD ret option::none[int];
ADD }
ADD auto res = list::find(l, match);
ADD assert (res == option::none[int]);
ADD }
ADD
ADD fn test_length() {
ADD auto l = from_vec([0, 1, 2]);
ADD assert (list::length(l) == 3u);
ADD }
ADD
CON fn main() {
CON test_from_vec();
ADD test_foldl();
ADD test_find_success();
ADD test_find_fail();
ADD test_length();
CON }
|
<<<<<<< SEARCH
import std::list::cdr;
import std::list::from_vec;
fn test_from_vec() {
=======
import std::list::cdr;
import std::list::from_vec;
import std::option;
fn test_from_vec() {
>>>>>>> REPLACE
<<<<<<< SEARCH
}
fn main() {
test_from_vec();
}
=======
}
fn test_foldl() {
auto l = from_vec([0, 1, 2, 3, 4]);
fn add (&int a, &uint b) -> uint {
ret (a as uint) + b;
}
auto res = list::foldl(l, 0u, add);
assert (res == 10u);
}
fn test_find_success() {
auto l = from_vec([0, 1, 2]);
fn match (&int i) -> option::t[int] {
ret if (i == 2) {
option::some(i)
} else {
option::none[int]
};
}
auto res = list::find(l, match);
assert (res == option::some(2));
}
fn test_find_fail() {
auto l = from_vec([0, 1, 2]);
fn match (&int i) -> option::t[int] {
ret option::none[int];
}
auto res = list::find(l, match);
assert (res == option::none[int]);
}
fn test_length() {
auto l = from_vec([0, 1, 2]);
assert (list::length(l) == 3u);
}
fn main() {
test_from_vec();
test_foldl();
test_find_success();
test_find_fail();
test_length();
}
>>>>>>> REPLACE
|
world-federation-of-advertisers/common-jvm
|
b0637ab61d37372b4691d5eeef258b5b1b8a4510
|
src/main/kotlin/org/wfanet/measurement/common/testing/CloseableResource.kt
|
kotlin
|
apache-2.0
|
Initialize properties on construction in Spanner testing AutoCloseables.
This more closely follows common AutoCloseable semantics.
Change-Id: I0fb47cf30ac2c17340791f4cc4572b706a816fa9
|
// Copyright 2020 The Measurement System Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.wfanet.measurement.common.testing
import org.junit.rules.TestRule
import org.junit.runner.Description
import org.junit.runners.model.Statement
/**
* [TestRule] that ensures that the given [AutoCloseable] resource is closed after statement
* evaluation.
*
* This can replace [ExternalResource][org.junit.rules.ExternalResource], which does not guarantee
* that [after][org.junit.rules.ExternalResource.after] is invoked if
* [before][org.junit.rules.ExternalResource.before] throws an exception.
*/
open class CloseableResource<T : AutoCloseable>(lazyResource: () -> T) : TestRule {
protected val resource by lazy { lazyResource() }
override fun apply(base: Statement, description: Description) = object : Statement() {
override fun evaluate() {
try {
resource.use {
before()
base.evaluate()
}
} finally {
after()
}
}
}
open fun before() {}
open fun after() {}
}
|
// Copyright 2020 The Measurement System Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.wfanet.measurement.common.testing
import org.junit.rules.TestRule
import org.junit.runner.Description
import org.junit.runners.model.Statement
/**
* [TestRule] that ensures that the given [AutoCloseable] resource is closed after statement
* evaluation.
*
* This can replace [ExternalResource][org.junit.rules.ExternalResource], which does not guarantee
* that [after][org.junit.rules.ExternalResource.after] is invoked if
* [before][org.junit.rules.ExternalResource.before] throws an exception.
*/
open class CloseableResource<T : AutoCloseable>(private val createResource: () -> T) : TestRule {
protected lateinit var resource: T
private set
override fun apply(base: Statement, description: Description) = object : Statement() {
override fun evaluate() {
check(!::resource.isInitialized)
resource = createResource()
resource.use { base.evaluate() }
}
}
}
| 7
| 14
| 2
|
mixed
|
--- a/src/main/kotlin/org/wfanet/measurement/common/testing/CloseableResource.kt
+++ b/src/main/kotlin/org/wfanet/measurement/common/testing/CloseableResource.kt
@@ -28,4 +28,5 @@
*/
-open class CloseableResource<T : AutoCloseable>(lazyResource: () -> T) : TestRule {
- protected val resource by lazy { lazyResource() }
+open class CloseableResource<T : AutoCloseable>(private val createResource: () -> T) : TestRule {
+ protected lateinit var resource: T
+ private set
@@ -33,16 +34,8 @@
override fun evaluate() {
- try {
- resource.use {
- before()
- base.evaluate()
- }
- } finally {
- after()
- }
+ check(!::resource.isInitialized)
+
+ resource = createResource()
+ resource.use { base.evaluate() }
}
}
-
- open fun before() {}
-
- open fun after() {}
}
|
--- a/src/main/kotlin/org/wfanet/measurement/common/testing/CloseableResource.kt
+++ b/src/main/kotlin/org/wfanet/measurement/common/testing/CloseableResource.kt
@@ ... @@
*/
-open class CloseableResource<T : AutoCloseable>(lazyResource: () -> T) : TestRule {
- protected val resource by lazy { lazyResource() }
+open class CloseableResource<T : AutoCloseable>(private val createResource: () -> T) : TestRule {
+ protected lateinit var resource: T
+ private set
@@ ... @@
override fun evaluate() {
- try {
- resource.use {
- before()
- base.evaluate()
- }
- } finally {
- after()
- }
+ check(!::resource.isInitialized)
+
+ resource = createResource()
+ resource.use { base.evaluate() }
}
}
-
- open fun before() {}
-
- open fun after() {}
}
|
--- a/src/main/kotlin/org/wfanet/measurement/common/testing/CloseableResource.kt
+++ b/src/main/kotlin/org/wfanet/measurement/common/testing/CloseableResource.kt
@@ -28,4 +28,5 @@
CON */
DEL open class CloseableResource<T : AutoCloseable>(lazyResource: () -> T) : TestRule {
DEL protected val resource by lazy { lazyResource() }
ADD open class CloseableResource<T : AutoCloseable>(private val createResource: () -> T) : TestRule {
ADD protected lateinit var resource: T
ADD private set
CON
@@ -33,16 +34,8 @@
CON override fun evaluate() {
DEL try {
DEL resource.use {
DEL before()
DEL base.evaluate()
DEL }
DEL } finally {
DEL after()
DEL }
ADD check(!::resource.isInitialized)
ADD
ADD resource = createResource()
ADD resource.use { base.evaluate() }
CON }
CON }
DEL
DEL open fun before() {}
DEL
DEL open fun after() {}
CON }
|
<<<<<<< SEARCH
* [before][org.junit.rules.ExternalResource.before] throws an exception.
*/
open class CloseableResource<T : AutoCloseable>(lazyResource: () -> T) : TestRule {
protected val resource by lazy { lazyResource() }
override fun apply(base: Statement, description: Description) = object : Statement() {
override fun evaluate() {
try {
resource.use {
before()
base.evaluate()
}
} finally {
after()
}
}
}
open fun before() {}
open fun after() {}
}
=======
* [before][org.junit.rules.ExternalResource.before] throws an exception.
*/
open class CloseableResource<T : AutoCloseable>(private val createResource: () -> T) : TestRule {
protected lateinit var resource: T
private set
override fun apply(base: Statement, description: Description) = object : Statement() {
override fun evaluate() {
check(!::resource.isInitialized)
resource = createResource()
resource.use { base.evaluate() }
}
}
}
>>>>>>> REPLACE
|
BitLimit/BlockRegression
|
486b3d3ab6d2cca20ec47736b5e6fa45bfe869e1
|
src/main/java/com/kolinkrewinkel/BitLimitBlockRegression/BlockGrowthManager.java
|
java
|
mit
|
Check before casting a null.
|
package com.kolinkrewinkel.BitLimitBlockRegression;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Random;
/**
* Created with IntelliJ IDEA.
* User: kolin
* Date: 7/14/13
* Time: 4:26 PM
* To change this template use File | Settings | File Templates.
*/
public class BlockGrowthManager {
private final BitLimitBlockRegression plugin;
public BlockGrowthManager(BitLimitBlockRegression plugin) {
this.plugin = plugin;
this.startRandomizationEvents();
}
private void startRandomizationEvents() {
class RepeatingGrowthTask implements Runnable {
private final BitLimitBlockRegression plugin;
public RepeatingGrowthTask(BitLimitBlockRegression plugin) {
this.plugin = plugin;
}
@Override
public void run() {
ArrayList<HashMap> conditionsList = (ArrayList<HashMap>) plugin.getConfig().get("conditions");
if (conditionsList != null) {
} else {
Bukkit.broadcastMessage(ChatColor.RED + "No conditions to grow were found.");
}
}
}
Bukkit.getScheduler().runTaskTimer(this.plugin, new RepeatingGrowthTask(this.plugin), 20L, 0L);
}
boolean randomWithLikelihood(float likelihood) {
Random rand = new Random();
return (rand.nextInt((int)likelihood * 100) == 0);
}
}
|
package com.kolinkrewinkel.BitLimitBlockRegression;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Random;
/**
* Created with IntelliJ IDEA.
* User: kolin
* Date: 7/14/13
* Time: 4:26 PM
* To change this template use File | Settings | File Templates.
*/
public class BlockGrowthManager {
private final BitLimitBlockRegression plugin;
public BlockGrowthManager(BitLimitBlockRegression plugin) {
this.plugin = plugin;
this.startRandomizationEvents();
}
private void startRandomizationEvents() {
class RepeatingGrowthTask implements Runnable {
private final BitLimitBlockRegression plugin;
public RepeatingGrowthTask(BitLimitBlockRegression plugin) {
this.plugin = plugin;
}
@Override
public void run() {
Object rawConditions = plugin.getConfig().get("conditions");
ArrayList<HashMap> conditionsList = null;
if (rawConditions != null) {
conditionsList = (ArrayList<HashMap>)rawConditions;
}
if (conditionsList != null) {
} else {
Bukkit.broadcastMessage(ChatColor.RED + "No conditions to grow were found.");
}
}
}
Bukkit.getScheduler().runTaskTimer(this.plugin, new RepeatingGrowthTask(this.plugin), 20L, 0L);
}
boolean randomWithLikelihood(float likelihood) {
Random rand = new Random();
return (rand.nextInt((int)likelihood * 100) == 0);
}
}
| 9
| 1
| 1
|
mixed
|
--- a/src/main/java/com/kolinkrewinkel/BitLimitBlockRegression/BlockGrowthManager.java
+++ b/src/main/java/com/kolinkrewinkel/BitLimitBlockRegression/BlockGrowthManager.java
@@ -37,3 +37,11 @@
public void run() {
- ArrayList<HashMap> conditionsList = (ArrayList<HashMap>) plugin.getConfig().get("conditions");
+
+ Object rawConditions = plugin.getConfig().get("conditions");
+ ArrayList<HashMap> conditionsList = null;
+
+ if (rawConditions != null) {
+ conditionsList = (ArrayList<HashMap>)rawConditions;
+ }
+
+
if (conditionsList != null) {
|
--- a/src/main/java/com/kolinkrewinkel/BitLimitBlockRegression/BlockGrowthManager.java
+++ b/src/main/java/com/kolinkrewinkel/BitLimitBlockRegression/BlockGrowthManager.java
@@ ... @@
public void run() {
- ArrayList<HashMap> conditionsList = (ArrayList<HashMap>) plugin.getConfig().get("conditions");
+
+ Object rawConditions = plugin.getConfig().get("conditions");
+ ArrayList<HashMap> conditionsList = null;
+
+ if (rawConditions != null) {
+ conditionsList = (ArrayList<HashMap>)rawConditions;
+ }
+
+
if (conditionsList != null) {
|
--- a/src/main/java/com/kolinkrewinkel/BitLimitBlockRegression/BlockGrowthManager.java
+++ b/src/main/java/com/kolinkrewinkel/BitLimitBlockRegression/BlockGrowthManager.java
@@ -37,3 +37,11 @@
CON public void run() {
DEL ArrayList<HashMap> conditionsList = (ArrayList<HashMap>) plugin.getConfig().get("conditions");
ADD
ADD Object rawConditions = plugin.getConfig().get("conditions");
ADD ArrayList<HashMap> conditionsList = null;
ADD
ADD if (rawConditions != null) {
ADD conditionsList = (ArrayList<HashMap>)rawConditions;
ADD }
ADD
ADD
CON if (conditionsList != null) {
|
<<<<<<< SEARCH
@Override
public void run() {
ArrayList<HashMap> conditionsList = (ArrayList<HashMap>) plugin.getConfig().get("conditions");
if (conditionsList != null) {
=======
@Override
public void run() {
Object rawConditions = plugin.getConfig().get("conditions");
ArrayList<HashMap> conditionsList = null;
if (rawConditions != null) {
conditionsList = (ArrayList<HashMap>)rawConditions;
}
if (conditionsList != null) {
>>>>>>> REPLACE
|
intellij-purescript/intellij-purescript
|
fe37475aed72028110a9b1567c83a0fd88833cfd
|
lexer/src/main/kotlin/org/purescript/PSLanguage.kt
|
kotlin
|
bsd-3-clause
|
Add all declarations from Prim to BUILTIN_TYPES
|
package org.purescript
import com.intellij.lang.Language
class PSLanguage : Language("Purescript", "text/purescript", "text/x-purescript", "application/x-purescript") {
companion object {
val INSTANCE = PSLanguage()
/**
* These modules are built into the purescript compiler,
* and have no corresponding source files.
*
* See [https://pursuit.purescript.org/builtins/docs/Prim] for details.
*/
val BUILTIN_MODULES = setOf(
"Prim",
"Prim.Boolean",
"Prim.Coerce",
"Prim.Ordering",
"Prim.Row",
"Prim.RowList",
"Prim.Symbol",
"Prim.TypeError",
)
/**
* These types are built into the purescript compiles,
* and are always available.
*
* See [https://pursuit.purescript.org/builtins/docs/Prim] for details.
*/
val BUILTIN_TYPES = setOf(
"Int",
"Number",
"String",
"Char",
"Boolean",
"Array",
"Type", // TODO Type is really a kind, not a type
"Row", // TODO Row is really a kind, not a type
)
}
}
|
package org.purescript
import com.intellij.lang.Language
class PSLanguage : Language("Purescript", "text/purescript", "text/x-purescript", "application/x-purescript") {
companion object {
val INSTANCE = PSLanguage()
/**
* These modules are built into the purescript compiler,
* and have no corresponding source files.
*
* See [https://pursuit.purescript.org/builtins/docs/Prim] for details.
*/
val BUILTIN_MODULES = setOf(
"Prim",
"Prim.Boolean",
"Prim.Coerce",
"Prim.Ordering",
"Prim.Row",
"Prim.RowList",
"Prim.Symbol",
"Prim.TypeError",
)
/**
* These types are built into the purescript compiles,
* and are always available.
*
* See [https://pursuit.purescript.org/builtins/docs/Prim] for details.
*/
val BUILTIN_TYPES = setOf(
"Function", // TODO Function is really a kind, not a type
"Record", // TODO Record is really a kind, not a type
"Partial", // TODO Partial is really a Class, not a type
"Constraint", // TODO Constraint is really a kind, not a type
"Symbol",
"Int",
"Number",
"String",
"Char",
"Boolean",
"Array",
"Type", // TODO Type is really a kind, not a type
"Row", // TODO Row is really a kind, not a type
)
}
}
| 5
| 0
| 1
|
add_only
|
--- a/lexer/src/main/kotlin/org/purescript/PSLanguage.kt
+++ b/lexer/src/main/kotlin/org/purescript/PSLanguage.kt
@@ -32,2 +32,7 @@
val BUILTIN_TYPES = setOf(
+ "Function", // TODO Function is really a kind, not a type
+ "Record", // TODO Record is really a kind, not a type
+ "Partial", // TODO Partial is really a Class, not a type
+ "Constraint", // TODO Constraint is really a kind, not a type
+ "Symbol",
"Int",
|
--- a/lexer/src/main/kotlin/org/purescript/PSLanguage.kt
+++ b/lexer/src/main/kotlin/org/purescript/PSLanguage.kt
@@ ... @@
val BUILTIN_TYPES = setOf(
+ "Function", // TODO Function is really a kind, not a type
+ "Record", // TODO Record is really a kind, not a type
+ "Partial", // TODO Partial is really a Class, not a type
+ "Constraint", // TODO Constraint is really a kind, not a type
+ "Symbol",
"Int",
|
--- a/lexer/src/main/kotlin/org/purescript/PSLanguage.kt
+++ b/lexer/src/main/kotlin/org/purescript/PSLanguage.kt
@@ -32,2 +32,7 @@
CON val BUILTIN_TYPES = setOf(
ADD "Function", // TODO Function is really a kind, not a type
ADD "Record", // TODO Record is really a kind, not a type
ADD "Partial", // TODO Partial is really a Class, not a type
ADD "Constraint", // TODO Constraint is really a kind, not a type
ADD "Symbol",
CON "Int",
|
<<<<<<< SEARCH
*/
val BUILTIN_TYPES = setOf(
"Int",
"Number",
=======
*/
val BUILTIN_TYPES = setOf(
"Function", // TODO Function is really a kind, not a type
"Record", // TODO Record is really a kind, not a type
"Partial", // TODO Partial is really a Class, not a type
"Constraint", // TODO Constraint is really a kind, not a type
"Symbol",
"Int",
"Number",
>>>>>>> REPLACE
|
googleinterns/step57-2020
|
05d81ff20390ac802a1ac2e914cf00a113a4b0cd
|
src/main/java/util/UserAuthUtil.java
|
java
|
apache-2.0
|
Add javadoc for authentication utility
|
package util;
import com.google.appengine.api.users.User;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
public class UserAuthUtil {
/**
* Returns a boolean for the user's login status
* @return user login status
*/
public static boolean isUserLoggedIn() {
UserService userServ = UserServiceFactory.getUserService();
return userServ.isUserLoggedIn();
}
/**
* @param redirect URL for webpage to return to after login
* @return
*/
public static String getLoginURL(String redirect) {
UserService userServ = UserServiceFactory.getUserService();
return userServ.createLoginURL(redirect);
}
public static String getLogoutURL(String redirect) {
return UserServiceFactory.getUserService().createLogoutURL(redirect);
}
public static User getUser() {
return UserServiceFactory.getUserService().getCurrentUser();
}
public static boolean isUserAuthorized() {
return getDomainName().equals("google.com");
}
private static String getDomainName() {
String email = getUser().getEmail();
return email.substring(email.indexOf('@') + 1);
}
}
|
package util;
import com.google.appengine.api.users.User;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
public class UserAuthUtil {
/**
* Returns a boolean for the user's login status
* @return user login status
*/
public static boolean isUserLoggedIn() {
UserService userServ = UserServiceFactory.getUserService();
return userServ.isUserLoggedIn();
}
/**
* @param redirect URL for webpage to return to after login
* @return URL for user to click to login
*/
public static String getLoginURL(String redirect) {
UserService userServ = UserServiceFactory.getUserService();
return userServ.createLoginURL(redirect);
}
/**
* @param redirect URL for webpage to return to after logout
* @return URL for user to click to logout
*/
public static String getLogoutURL(String redirect) {
return UserServiceFactory.getUserService().createLogoutURL(redirect);
}
/**
* Helper method to return a User object
*/
public static User getUser() {
return UserServiceFactory.getUserService().getCurrentUser();
}
/**
* Determines whether a user is authorized to use the requested resource
* @return true when the user's email domain is "google.com"
*/
public static boolean isUserAuthorized() {
return getDomainName().equals("google.com");
}
/**
* @return domain name from a user's email address
*/
private static String getDomainName() {
String email = getUser().getEmail();
return email.substring(email.indexOf('@') + 1);
}
}
| 18
| 1
| 5
|
mixed
|
--- a/src/main/java/util/UserAuthUtil.java
+++ b/src/main/java/util/UserAuthUtil.java
@@ -18,3 +18,3 @@
* @param redirect URL for webpage to return to after login
- * @return
+ * @return URL for user to click to login
*/
@@ -24,2 +24,7 @@
}
+
+ /**
+ * @param redirect URL for webpage to return to after logout
+ * @return URL for user to click to logout
+ */
public static String getLogoutURL(String redirect) {
@@ -27,2 +32,6 @@
}
+
+ /**
+ * Helper method to return a User object
+ */
public static User getUser() {
@@ -30,2 +39,7 @@
}
+
+ /**
+ * Determines whether a user is authorized to use the requested resource
+ * @return true when the user's email domain is "google.com"
+ */
public static boolean isUserAuthorized() {
@@ -34,2 +48,5 @@
+ /**
+ * @return domain name from a user's email address
+ */
private static String getDomainName() {
|
--- a/src/main/java/util/UserAuthUtil.java
+++ b/src/main/java/util/UserAuthUtil.java
@@ ... @@
* @param redirect URL for webpage to return to after login
- * @return
+ * @return URL for user to click to login
*/
@@ ... @@
}
+
+ /**
+ * @param redirect URL for webpage to return to after logout
+ * @return URL for user to click to logout
+ */
public static String getLogoutURL(String redirect) {
@@ ... @@
}
+
+ /**
+ * Helper method to return a User object
+ */
public static User getUser() {
@@ ... @@
}
+
+ /**
+ * Determines whether a user is authorized to use the requested resource
+ * @return true when the user's email domain is "google.com"
+ */
public static boolean isUserAuthorized() {
@@ ... @@
+ /**
+ * @return domain name from a user's email address
+ */
private static String getDomainName() {
|
--- a/src/main/java/util/UserAuthUtil.java
+++ b/src/main/java/util/UserAuthUtil.java
@@ -18,3 +18,3 @@
CON * @param redirect URL for webpage to return to after login
DEL * @return
ADD * @return URL for user to click to login
CON */
@@ -24,2 +24,7 @@
CON }
ADD
ADD /**
ADD * @param redirect URL for webpage to return to after logout
ADD * @return URL for user to click to logout
ADD */
CON public static String getLogoutURL(String redirect) {
@@ -27,2 +32,6 @@
CON }
ADD
ADD /**
ADD * Helper method to return a User object
ADD */
CON public static User getUser() {
@@ -30,2 +39,7 @@
CON }
ADD
ADD /**
ADD * Determines whether a user is authorized to use the requested resource
ADD * @return true when the user's email domain is "google.com"
ADD */
CON public static boolean isUserAuthorized() {
@@ -34,2 +48,5 @@
CON
ADD /**
ADD * @return domain name from a user's email address
ADD */
CON private static String getDomainName() {
|
<<<<<<< SEARCH
/**
* @param redirect URL for webpage to return to after login
* @return
*/
public static String getLoginURL(String redirect) {
UserService userServ = UserServiceFactory.getUserService();
return userServ.createLoginURL(redirect);
}
public static String getLogoutURL(String redirect) {
return UserServiceFactory.getUserService().createLogoutURL(redirect);
}
public static User getUser() {
return UserServiceFactory.getUserService().getCurrentUser();
}
public static boolean isUserAuthorized() {
return getDomainName().equals("google.com");
}
private static String getDomainName() {
String email = getUser().getEmail();
=======
/**
* @param redirect URL for webpage to return to after login
* @return URL for user to click to login
*/
public static String getLoginURL(String redirect) {
UserService userServ = UserServiceFactory.getUserService();
return userServ.createLoginURL(redirect);
}
/**
* @param redirect URL for webpage to return to after logout
* @return URL for user to click to logout
*/
public static String getLogoutURL(String redirect) {
return UserServiceFactory.getUserService().createLogoutURL(redirect);
}
/**
* Helper method to return a User object
*/
public static User getUser() {
return UserServiceFactory.getUserService().getCurrentUser();
}
/**
* Determines whether a user is authorized to use the requested resource
* @return true when the user's email domain is "google.com"
*/
public static boolean isUserAuthorized() {
return getDomainName().equals("google.com");
}
/**
* @return domain name from a user's email address
*/
private static String getDomainName() {
String email = getUser().getEmail();
>>>>>>> REPLACE
|
blindpirate/gradle
|
64bee71e255b0e0f5359bf63fd4b8e26408b613c
|
subprojects/integ-test/integ-test.gradle.kts
|
kotlin
|
apache-2.0
|
Remove usage of `mavenLocal()` from `:integTest`
|
import org.gradle.gradlebuild.test.integrationtests.IntegrationTest
import org.gradle.gradlebuild.testing.integrationtests.cleanup.WhenNotEmpty
import org.gradle.gradlebuild.unittestandcompile.ModuleType
plugins {
gradlebuild.classycle
}
repositories {
mavenLocal()
}
dependencies {
integTestCompile(library("groovy"))
integTestCompile(library("ant"))
integTestCompile(testLibrary("jsoup"))
integTestCompile(testLibrary("sampleCheck")) {
exclude(group = "org.codehaus.groovy", module = "groovy-all")
exclude(module = "slf4j-simple")
}
val allTestRuntimeDependencies: DependencySet by rootProject.extra
allTestRuntimeDependencies.forEach {
integTestRuntime(it)
}
crossVersionTestCompile(project(":scala"))
crossVersionTestCompile(project(":ide"))
crossVersionTestCompile(project(":codeQuality"))
crossVersionTestCompile(project(":signing"))
allTestRuntimeDependencies.forEach {
crossVersionTestRuntime(it)
}
}
gradlebuildJava {
moduleType = ModuleType.INTERNAL
}
testFixtures {
from(":diagnostics", "integTest")
from(":platformNative", "integTest")
}
val integTestTasks: DomainObjectCollection<IntegrationTest> by extra
integTestTasks.configureEach {
libsRepository.required = true
}
testFilesCleanup {
policy.set(WhenNotEmpty.REPORT)
}
|
import org.gradle.gradlebuild.test.integrationtests.IntegrationTest
import org.gradle.gradlebuild.testing.integrationtests.cleanup.WhenNotEmpty
import org.gradle.gradlebuild.unittestandcompile.ModuleType
plugins {
gradlebuild.classycle
}
dependencies {
integTestCompile(library("groovy"))
integTestCompile(library("ant"))
integTestCompile(testLibrary("jsoup"))
integTestCompile(testLibrary("sampleCheck")) {
exclude(group = "org.codehaus.groovy", module = "groovy-all")
exclude(module = "slf4j-simple")
}
val allTestRuntimeDependencies: DependencySet by rootProject.extra
allTestRuntimeDependencies.forEach {
integTestRuntime(it)
}
crossVersionTestCompile(project(":scala"))
crossVersionTestCompile(project(":ide"))
crossVersionTestCompile(project(":codeQuality"))
crossVersionTestCompile(project(":signing"))
allTestRuntimeDependencies.forEach {
crossVersionTestRuntime(it)
}
}
gradlebuildJava {
moduleType = ModuleType.INTERNAL
}
testFixtures {
from(":diagnostics", "integTest")
from(":platformNative", "integTest")
}
val integTestTasks: DomainObjectCollection<IntegrationTest> by extra
integTestTasks.configureEach {
libsRepository.required = true
}
testFilesCleanup {
policy.set(WhenNotEmpty.REPORT)
}
| 0
| 4
| 1
|
del_only
|
--- a/subprojects/integ-test/integ-test.gradle.kts
+++ b/subprojects/integ-test/integ-test.gradle.kts
@@ -6,6 +6,2 @@
gradlebuild.classycle
-}
-
-repositories {
- mavenLocal()
}
|
--- a/subprojects/integ-test/integ-test.gradle.kts
+++ b/subprojects/integ-test/integ-test.gradle.kts
@@ ... @@
gradlebuild.classycle
-}
-
-repositories {
- mavenLocal()
}
|
--- a/subprojects/integ-test/integ-test.gradle.kts
+++ b/subprojects/integ-test/integ-test.gradle.kts
@@ -6,6 +6,2 @@
CON gradlebuild.classycle
DEL }
DEL
DEL repositories {
DEL mavenLocal()
CON }
|
<<<<<<< SEARCH
plugins {
gradlebuild.classycle
}
repositories {
mavenLocal()
}
=======
plugins {
gradlebuild.classycle
}
>>>>>>> REPLACE
|
EvilMcJerkface/atlasdb
|
219557a6c70df9bbb9cea1249e262a46af969977
|
atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/HiddenTables.java
|
java
|
apache-2.0
|
Make locks tables begin with _locks_, not just _locks
|
/**
* Copyright 2016 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.keyvalue.cassandra;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import com.palantir.atlasdb.AtlasDbConstants;
import com.palantir.atlasdb.keyvalue.api.TableReference;
class HiddenTables {
private TableReference lockTable;
private final Set<TableReference> hiddenTables;
static final String LOCK_TABLE_PREFIX = "_locks";
HiddenTables() {
this.hiddenTables = ImmutableSet.of(
AtlasDbConstants.TIMESTAMP_TABLE,
AtlasDbConstants.METADATA_TABLE);
}
boolean isHidden(TableReference tableReference) {
return hiddenTables.contains(tableReference) || (tableReference != null && tableReference.equals(lockTable));
}
TableReference getLockTable() {
return lockTable;
}
void setLockTable(TableReference lockTable) {
this.lockTable = lockTable;
}
}
|
/**
* Copyright 2016 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.keyvalue.cassandra;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import com.palantir.atlasdb.AtlasDbConstants;
import com.palantir.atlasdb.keyvalue.api.TableReference;
class HiddenTables {
private TableReference lockTable;
private final Set<TableReference> hiddenTables;
static final String LOCK_TABLE_PREFIX = "_locks_";
HiddenTables() {
this.hiddenTables = ImmutableSet.of(
AtlasDbConstants.TIMESTAMP_TABLE,
AtlasDbConstants.METADATA_TABLE);
}
boolean isHidden(TableReference tableReference) {
return hiddenTables.contains(tableReference) || (tableReference != null && tableReference.equals(lockTable));
}
TableReference getLockTable() {
return lockTable;
}
void setLockTable(TableReference lockTable) {
this.lockTable = lockTable;
}
}
| 1
| 1
| 1
|
mixed
|
--- a/atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/HiddenTables.java
+++ b/atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/HiddenTables.java
@@ -26,3 +26,3 @@
private final Set<TableReference> hiddenTables;
- static final String LOCK_TABLE_PREFIX = "_locks";
+ static final String LOCK_TABLE_PREFIX = "_locks_";
|
--- a/atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/HiddenTables.java
+++ b/atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/HiddenTables.java
@@ ... @@
private final Set<TableReference> hiddenTables;
- static final String LOCK_TABLE_PREFIX = "_locks";
+ static final String LOCK_TABLE_PREFIX = "_locks_";
|
--- a/atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/HiddenTables.java
+++ b/atlasdb-cassandra/src/main/java/com/palantir/atlasdb/keyvalue/cassandra/HiddenTables.java
@@ -26,3 +26,3 @@
CON private final Set<TableReference> hiddenTables;
DEL static final String LOCK_TABLE_PREFIX = "_locks";
ADD static final String LOCK_TABLE_PREFIX = "_locks_";
CON
|
<<<<<<< SEARCH
private TableReference lockTable;
private final Set<TableReference> hiddenTables;
static final String LOCK_TABLE_PREFIX = "_locks";
=======
private TableReference lockTable;
private final Set<TableReference> hiddenTables;
static final String LOCK_TABLE_PREFIX = "_locks_";
>>>>>>> REPLACE
|
arturbosch/detekt
|
df23e6189b88beab3524fcef76269b95a23b6ccd
|
detekt-core/src/test/kotlin/io/gitlab/arturbosch/detekt/core/KtTreeCompilerSpec.kt
|
kotlin
|
apache-2.0
|
Add test for more than one filter for compiler
|
package io.gitlab.arturbosch.detekt.core
import org.jetbrains.spek.api.Spek
import org.jetbrains.spek.api.dsl.describe
import org.jetbrains.spek.api.dsl.it
import kotlin.test.assertNull
import kotlin.test.assertTrue
/**
* @author Artur Bosch
*/
class KtTreeCompilerSpec : Spek({
describe("tree compiler functionality") {
it("should compile all files") {
val ktFiles = KtTreeCompiler(path).compile()
assertTrue(ktFiles.size >= 2, "It should compile more than two files, but did ${ktFiles.size}")
}
it("should filter the file 'Default.kt'") {
val filter = PathFilter(".*Default.kt")
val ktFiles = KtTreeCompiler(path, listOf(filter)).compile()
val ktFile = ktFiles.find { it.name == "Default.kt" }
assertNull(ktFile, "It should have no Default.kt file")
}
it("should also compile regular files") {
assertTrue { KtTreeCompiler(path.resolve("Default.kt")).compile().size == 1 }
}
}
})
|
package io.gitlab.arturbosch.detekt.core
import org.assertj.core.api.Assertions.assertThat
import org.jetbrains.spek.api.Spek
import org.jetbrains.spek.api.dsl.describe
import org.jetbrains.spek.api.dsl.it
import kotlin.test.assertNull
import kotlin.test.assertTrue
/**
* @author Artur Bosch
*/
class KtTreeCompilerSpec : Spek({
describe("tree compiler functionality") {
it("should compile all files") {
val ktFiles = KtTreeCompiler(path).compile()
assertTrue(ktFiles.size >= 2, "It should compile more than two files, but did ${ktFiles.size}")
}
it("should filter the file 'Default.kt'") {
val filter = PathFilter(".*Default.kt")
val ktFiles = KtTreeCompiler(path, listOf(filter)).compile()
val ktFile = ktFiles.find { it.name == "Default.kt" }
assertNull(ktFile, "It should have no Default.kt file")
}
it("should work with two or more filters") {
val filter = PathFilter(".*Default.kt")
val filterTwo = PathFilter(".*Test.*")
val filterThree = PathFilter(".*Complex.*")
val ktFiles = KtTreeCompiler(path, listOf(filter, filterTwo, filterThree)).compile()
assertThat(ktFiles).isEmpty()
}
it("should also compile regular files") {
assertTrue { KtTreeCompiler(path.resolve("Default.kt")).compile().size == 1 }
}
}
})
| 9
| 0
| 2
|
add_only
|
--- a/detekt-core/src/test/kotlin/io/gitlab/arturbosch/detekt/core/KtTreeCompilerSpec.kt
+++ b/detekt-core/src/test/kotlin/io/gitlab/arturbosch/detekt/core/KtTreeCompilerSpec.kt
@@ -2,2 +2,3 @@
+import org.assertj.core.api.Assertions.assertThat
import org.jetbrains.spek.api.Spek
@@ -27,2 +28,10 @@
+ it("should work with two or more filters") {
+ val filter = PathFilter(".*Default.kt")
+ val filterTwo = PathFilter(".*Test.*")
+ val filterThree = PathFilter(".*Complex.*")
+ val ktFiles = KtTreeCompiler(path, listOf(filter, filterTwo, filterThree)).compile()
+ assertThat(ktFiles).isEmpty()
+ }
+
it("should also compile regular files") {
|
--- a/detekt-core/src/test/kotlin/io/gitlab/arturbosch/detekt/core/KtTreeCompilerSpec.kt
+++ b/detekt-core/src/test/kotlin/io/gitlab/arturbosch/detekt/core/KtTreeCompilerSpec.kt
@@ ... @@
+import org.assertj.core.api.Assertions.assertThat
import org.jetbrains.spek.api.Spek
@@ ... @@
+ it("should work with two or more filters") {
+ val filter = PathFilter(".*Default.kt")
+ val filterTwo = PathFilter(".*Test.*")
+ val filterThree = PathFilter(".*Complex.*")
+ val ktFiles = KtTreeCompiler(path, listOf(filter, filterTwo, filterThree)).compile()
+ assertThat(ktFiles).isEmpty()
+ }
+
it("should also compile regular files") {
|
--- a/detekt-core/src/test/kotlin/io/gitlab/arturbosch/detekt/core/KtTreeCompilerSpec.kt
+++ b/detekt-core/src/test/kotlin/io/gitlab/arturbosch/detekt/core/KtTreeCompilerSpec.kt
@@ -2,2 +2,3 @@
CON
ADD import org.assertj.core.api.Assertions.assertThat
CON import org.jetbrains.spek.api.Spek
@@ -27,2 +28,10 @@
CON
ADD it("should work with two or more filters") {
ADD val filter = PathFilter(".*Default.kt")
ADD val filterTwo = PathFilter(".*Test.*")
ADD val filterThree = PathFilter(".*Complex.*")
ADD val ktFiles = KtTreeCompiler(path, listOf(filter, filterTwo, filterThree)).compile()
ADD assertThat(ktFiles).isEmpty()
ADD }
ADD
CON it("should also compile regular files") {
|
<<<<<<< SEARCH
package io.gitlab.arturbosch.detekt.core
import org.jetbrains.spek.api.Spek
import org.jetbrains.spek.api.dsl.describe
=======
package io.gitlab.arturbosch.detekt.core
import org.assertj.core.api.Assertions.assertThat
import org.jetbrains.spek.api.Spek
import org.jetbrains.spek.api.dsl.describe
>>>>>>> REPLACE
<<<<<<< SEARCH
}
it("should also compile regular files") {
assertTrue { KtTreeCompiler(path.resolve("Default.kt")).compile().size == 1 }
=======
}
it("should work with two or more filters") {
val filter = PathFilter(".*Default.kt")
val filterTwo = PathFilter(".*Test.*")
val filterThree = PathFilter(".*Complex.*")
val ktFiles = KtTreeCompiler(path, listOf(filter, filterTwo, filterThree)).compile()
assertThat(ktFiles).isEmpty()
}
it("should also compile regular files") {
assertTrue { KtTreeCompiler(path.resolve("Default.kt")).compile().size == 1 }
>>>>>>> REPLACE
|
intellij-purescript/intellij-purescript
|
5ab9a5ba9e98af59e0c3bd8118f194772493d0c6
|
src/main/kotlin/org/purescript/ide/purs/Npm.kt
|
kotlin
|
bsd-3-clause
|
Remove explicit use of default value
|
package org.purescript.ide.purs
import com.intellij.openapi.diagnostic.logger
import com.intellij.openapi.util.SystemInfo
import com.intellij.util.io.exists
import java.nio.file.Path
import java.util.concurrent.TimeUnit
class Npm {
companion object {
private val localBinPath: String by lazy { run("npm bin") }
private val globalBinPath: String by lazy { run("npm -g bin") }
private fun run(command: String): String {
val npmCmd = when {
SystemInfo.isWindows -> listOf("cmd", "/c", command)
else -> listOf("/usr/bin/env", "bash", "-c", command)
}
val npmProc = ProcessBuilder(npmCmd)
.redirectError(ProcessBuilder.Redirect.PIPE)
.redirectOutput(ProcessBuilder.Redirect.PIPE)
.start()
npmProc.waitFor(4, TimeUnit.SECONDS)
return npmProc.inputStream.bufferedReader().readLine()
}
private val log = logger<Npm>()
fun pathFor(command: String): Path? {
val binary = when {
SystemInfo.isWindows -> "$command.cmd"
else -> command
}
val localCommand = Path.of(localBinPath, binary)
if (localCommand.exists()) return localCommand
val globalCommand = Path.of(globalBinPath, binary)
if (globalCommand.exists()) return globalCommand
if (log.isDebugEnabled) log.debug("$command is not found")
return null
}
}
}
|
package org.purescript.ide.purs
import com.intellij.openapi.diagnostic.logger
import com.intellij.openapi.util.SystemInfo
import com.intellij.util.io.exists
import java.nio.file.Path
import java.util.concurrent.TimeUnit
class Npm {
companion object {
private val localBinPath: String by lazy { run("npm bin") }
private val globalBinPath: String by lazy { run("npm -g bin") }
private fun run(command: String): String {
val npmCmd = when {
SystemInfo.isWindows -> listOf("cmd", "/c", command)
else -> listOf("/usr/bin/env", "bash", "-c", command)
}
val npmProc = ProcessBuilder(npmCmd).start()
npmProc.waitFor(4, TimeUnit.SECONDS)
return npmProc.inputStream.bufferedReader().readLine()
}
private val log = logger<Npm>()
fun pathFor(command: String): Path? {
val binary = when {
SystemInfo.isWindows -> "$command.cmd"
else -> command
}
val localCommand = Path.of(localBinPath, binary)
if (localCommand.exists()) return localCommand
val globalCommand = Path.of(globalBinPath, binary)
if (globalCommand.exists()) return globalCommand
if (log.isDebugEnabled) log.debug("$command is not found")
return null
}
}
}
| 1
| 4
| 1
|
mixed
|
--- a/src/main/kotlin/org/purescript/ide/purs/Npm.kt
+++ b/src/main/kotlin/org/purescript/ide/purs/Npm.kt
@@ -20,6 +20,3 @@
}
- val npmProc = ProcessBuilder(npmCmd)
- .redirectError(ProcessBuilder.Redirect.PIPE)
- .redirectOutput(ProcessBuilder.Redirect.PIPE)
- .start()
+ val npmProc = ProcessBuilder(npmCmd).start()
npmProc.waitFor(4, TimeUnit.SECONDS)
|
--- a/src/main/kotlin/org/purescript/ide/purs/Npm.kt
+++ b/src/main/kotlin/org/purescript/ide/purs/Npm.kt
@@ ... @@
}
- val npmProc = ProcessBuilder(npmCmd)
- .redirectError(ProcessBuilder.Redirect.PIPE)
- .redirectOutput(ProcessBuilder.Redirect.PIPE)
- .start()
+ val npmProc = ProcessBuilder(npmCmd).start()
npmProc.waitFor(4, TimeUnit.SECONDS)
|
--- a/src/main/kotlin/org/purescript/ide/purs/Npm.kt
+++ b/src/main/kotlin/org/purescript/ide/purs/Npm.kt
@@ -20,6 +20,3 @@
CON }
DEL val npmProc = ProcessBuilder(npmCmd)
DEL .redirectError(ProcessBuilder.Redirect.PIPE)
DEL .redirectOutput(ProcessBuilder.Redirect.PIPE)
DEL .start()
ADD val npmProc = ProcessBuilder(npmCmd).start()
CON npmProc.waitFor(4, TimeUnit.SECONDS)
|
<<<<<<< SEARCH
else -> listOf("/usr/bin/env", "bash", "-c", command)
}
val npmProc = ProcessBuilder(npmCmd)
.redirectError(ProcessBuilder.Redirect.PIPE)
.redirectOutput(ProcessBuilder.Redirect.PIPE)
.start()
npmProc.waitFor(4, TimeUnit.SECONDS)
return npmProc.inputStream.bufferedReader().readLine()
=======
else -> listOf("/usr/bin/env", "bash", "-c", command)
}
val npmProc = ProcessBuilder(npmCmd).start()
npmProc.waitFor(4, TimeUnit.SECONDS)
return npmProc.inputStream.bufferedReader().readLine()
>>>>>>> REPLACE
|
edloidas/rollrobot
|
217563dbab97f45e7608661db926dd462261c14d
|
src/handlers.js
|
javascript
|
mit
|
:bug: Fix inline query handler creation
|
const { inline, roll, full, random, help, deprecated } = require('./query');
const { createOptions, createInlineOptions } = require('./options');
const { error } = require('./text');
function createHandler(bot, query) {
const { regexp, reply } = query;
bot.onText(regexp, (msg, match) => {
try {
const { id } = msg.chat;
const notation = ((match && match[3]) || '').trim();
const response = reply(notation) || error;
const options = createOptions(msg);
bot.sendMessage(id, response, options);
} catch (e) {
console.error(e);
}
});
}
function createInlineHandler(bot) {
const { createInlineArticles } = inline;
bot.onText('inline_query', msg => {
try {
const { id, query } = msg;
const options = createInlineOptions();
const results = createInlineArticles(query);
bot.answerInlineQuery(id, results, options);
} catch (e) {
console.error(e);
}
});
}
function initHandlers(bot) {
createInlineHandler(bot);
createHandler(bot, roll);
createHandler(bot, full);
createHandler(bot, random);
createHandler(bot, help);
createHandler(bot, deprecated);
}
module.exports = {
initHandlers
};
|
const { inline, roll, full, random, help, deprecated } = require('./query');
const { createOptions, createInlineOptions } = require('./options');
const { error } = require('./text');
/*
More event type are described in official API of `node-telegram-bot-api`
https://github.com/yagop/node-telegram-bot-api/blob/master/doc/usage.md
*/
function createHandler(bot, query) {
const { regexp, reply } = query;
bot.onText(regexp, (msg, match) => {
try {
const { id } = msg.chat;
const notation = ((match && match[3]) || '').trim();
const response = reply(notation) || error;
const options = createOptions(msg);
bot.sendMessage(id, response, options);
} catch (e) {
console.error(e);
}
});
}
function createInlineHandler(bot) {
const { createInlineArticles } = inline;
bot.on('inline_query', msg => {
try {
const { id, query } = msg;
const options = createInlineOptions();
const results = createInlineArticles(query);
bot.answerInlineQuery(id, results, options);
} catch (e) {
console.error(e);
}
});
}
function initHandlers(bot) {
createInlineHandler(bot);
createHandler(bot, roll);
createHandler(bot, full);
createHandler(bot, random);
createHandler(bot, help);
createHandler(bot, deprecated);
}
module.exports = {
initHandlers
};
| 6
| 1
| 2
|
mixed
|
--- a/src/handlers.js
+++ b/src/handlers.js
@@ -3,2 +3,7 @@
const { error } = require('./text');
+
+/*
+More event type are described in official API of `node-telegram-bot-api`
+https://github.com/yagop/node-telegram-bot-api/blob/master/doc/usage.md
+*/
@@ -23,3 +28,3 @@
- bot.onText('inline_query', msg => {
+ bot.on('inline_query', msg => {
try {
|
--- a/src/handlers.js
+++ b/src/handlers.js
@@ ... @@
const { error } = require('./text');
+
+/*
+More event type are described in official API of `node-telegram-bot-api`
+https://github.com/yagop/node-telegram-bot-api/blob/master/doc/usage.md
+*/
@@ ... @@
- bot.onText('inline_query', msg => {
+ bot.on('inline_query', msg => {
try {
|
--- a/src/handlers.js
+++ b/src/handlers.js
@@ -3,2 +3,7 @@
CON const { error } = require('./text');
ADD
ADD /*
ADD More event type are described in official API of `node-telegram-bot-api`
ADD https://github.com/yagop/node-telegram-bot-api/blob/master/doc/usage.md
ADD */
CON
@@ -23,3 +28,3 @@
CON
DEL bot.onText('inline_query', msg => {
ADD bot.on('inline_query', msg => {
CON try {
|
<<<<<<< SEARCH
const { createOptions, createInlineOptions } = require('./options');
const { error } = require('./text');
function createHandler(bot, query) {
=======
const { createOptions, createInlineOptions } = require('./options');
const { error } = require('./text');
/*
More event type are described in official API of `node-telegram-bot-api`
https://github.com/yagop/node-telegram-bot-api/blob/master/doc/usage.md
*/
function createHandler(bot, query) {
>>>>>>> REPLACE
<<<<<<< SEARCH
const { createInlineArticles } = inline;
bot.onText('inline_query', msg => {
try {
const { id, query } = msg;
=======
const { createInlineArticles } = inline;
bot.on('inline_query', msg => {
try {
const { id, query } = msg;
>>>>>>> REPLACE
|
Reduks/Reduks
|
94ce4240f2ec1c0e76950467ecbc698b74a66a47
|
src/test/java/com/reduks/reduks/StoreTest.kt
|
kotlin
|
mit
|
Create store get state test
|
package com.reduks.reduks
import com.reduks.reduks.repository.FakeActions
import com.reduks.reduks.repository.FakeData
import com.reduks.reduks.repository.FakeState
import com.reduks.reduks.subscription.Subscriber
import com.reduks.reduks.subscription.Subscription
import org.jetbrains.spek.api.Spek
import org.jetbrains.spek.api.dsl.given
import org.jetbrains.spek.api.dsl.it
import org.junit.platform.runner.JUnitPlatform
import org.junit.runner.RunWith
import kotlin.test.assertTrue
@RunWith(JUnitPlatform::class)
class StoreTest : Spek({
given("subscriptions") {
val subscription = Subscription {
print("\nunsubscribed")
assertTrue(true)
}
val subscriber = Subscriber<FakeState> { state ->
print("\n$state")
assertTrue(state.name.toLowerCase().trim() == "bloder")
}
beforeEach {
FakeData.store.subscribe(subscriber)
print("\nsubscribed")
}
it("should return a transformed state in subscriber state changed action") {
FakeData.store.dispatch(FakeActions.SetValidState())
}
it("confirm that unsubscribing works") {
subscription.unsubscribe()
}
}
})
|
package com.reduks.reduks
import com.reduks.reduks.repository.FakeActions
import com.reduks.reduks.repository.FakeData
import com.reduks.reduks.repository.FakeState
import com.reduks.reduks.subscription.Subscriber
import com.reduks.reduks.subscription.Subscription
import org.jetbrains.spek.api.Spek
import org.jetbrains.spek.api.dsl.given
import org.jetbrains.spek.api.dsl.it
import org.junit.platform.runner.JUnitPlatform
import org.junit.runner.RunWith
import kotlin.test.assertTrue
@RunWith(JUnitPlatform::class)
class StoreTest : Spek({
given("subscriptions") {
val subscription = Subscription {
assertTrue(true)
}
val subscriber = Subscriber<FakeState> { state ->
assertTrue(state.name.toLowerCase().trim() == "bloder")
}
beforeEach {
FakeData.store.subscribe(subscriber)
}
it("should return a transformed state in subscriber state changed action") {
FakeData.store.dispatch(FakeActions.SetValidState())
}
it("confirm that unsubscribing works") {
subscription.unsubscribe()
}
it("should return updated state when I get it from store") {
FakeData.store.subscribe(Subscriber {})
FakeData.store.dispatch(FakeActions.SetValidState())
assertTrue { FakeData.store.getState().name.trim().toLowerCase() == "bloder" }
}
}
})
| 9
| 4
| 3
|
mixed
|
--- a/src/test/java/com/reduks/reduks/StoreTest.kt
+++ b/src/test/java/com/reduks/reduks/StoreTest.kt
@@ -20,7 +20,6 @@
val subscription = Subscription {
- print("\nunsubscribed")
assertTrue(true)
}
+
val subscriber = Subscriber<FakeState> { state ->
- print("\n$state")
assertTrue(state.name.toLowerCase().trim() == "bloder")
@@ -30,3 +29,2 @@
FakeData.store.subscribe(subscriber)
- print("\nsubscribed")
}
@@ -40,4 +38,11 @@
}
-
+
+ it("should return updated state when I get it from store") {
+ FakeData.store.subscribe(Subscriber {})
+ FakeData.store.dispatch(FakeActions.SetValidState())
+ assertTrue { FakeData.store.getState().name.trim().toLowerCase() == "bloder" }
+ }
+
}
+
})
|
--- a/src/test/java/com/reduks/reduks/StoreTest.kt
+++ b/src/test/java/com/reduks/reduks/StoreTest.kt
@@ ... @@
val subscription = Subscription {
- print("\nunsubscribed")
assertTrue(true)
}
+
val subscriber = Subscriber<FakeState> { state ->
- print("\n$state")
assertTrue(state.name.toLowerCase().trim() == "bloder")
@@ ... @@
FakeData.store.subscribe(subscriber)
- print("\nsubscribed")
}
@@ ... @@
}
-
+
+ it("should return updated state when I get it from store") {
+ FakeData.store.subscribe(Subscriber {})
+ FakeData.store.dispatch(FakeActions.SetValidState())
+ assertTrue { FakeData.store.getState().name.trim().toLowerCase() == "bloder" }
+ }
+
}
+
})
|
--- a/src/test/java/com/reduks/reduks/StoreTest.kt
+++ b/src/test/java/com/reduks/reduks/StoreTest.kt
@@ -20,7 +20,6 @@
CON val subscription = Subscription {
DEL print("\nunsubscribed")
CON assertTrue(true)
CON }
ADD
CON val subscriber = Subscriber<FakeState> { state ->
DEL print("\n$state")
CON assertTrue(state.name.toLowerCase().trim() == "bloder")
@@ -30,3 +29,2 @@
CON FakeData.store.subscribe(subscriber)
DEL print("\nsubscribed")
CON }
@@ -40,4 +38,11 @@
CON }
DEL
ADD
ADD it("should return updated state when I get it from store") {
ADD FakeData.store.subscribe(Subscriber {})
ADD FakeData.store.dispatch(FakeActions.SetValidState())
ADD assertTrue { FakeData.store.getState().name.trim().toLowerCase() == "bloder" }
ADD }
ADD
CON }
ADD
CON })
|
<<<<<<< SEARCH
val subscription = Subscription {
print("\nunsubscribed")
assertTrue(true)
}
val subscriber = Subscriber<FakeState> { state ->
print("\n$state")
assertTrue(state.name.toLowerCase().trim() == "bloder")
}
beforeEach {
FakeData.store.subscribe(subscriber)
print("\nsubscribed")
}
=======
val subscription = Subscription {
assertTrue(true)
}
val subscriber = Subscriber<FakeState> { state ->
assertTrue(state.name.toLowerCase().trim() == "bloder")
}
beforeEach {
FakeData.store.subscribe(subscriber)
}
>>>>>>> REPLACE
<<<<<<< SEARCH
subscription.unsubscribe()
}
}
})
=======
subscription.unsubscribe()
}
it("should return updated state when I get it from store") {
FakeData.store.subscribe(Subscriber {})
FakeData.store.dispatch(FakeActions.SetValidState())
assertTrue { FakeData.store.getState().name.trim().toLowerCase() == "bloder" }
}
}
})
>>>>>>> REPLACE
|
sourrust/flac
|
2d24d4d1ab14f99d72461c2cef52c21dab9d88c9
|
src/lib.rs
|
rust
|
bsd-3-clause
|
Add documentation for type information of iter
|
//! An implementation of [FLAC](https://xiph.org/flac), free lossless audio
//! codec, written in Rust.
//!
//! The code is available on [GitHub](https://github.com/sourrust/flac).
//!
//! # Examples
//!
//! Basic decoding from a file.
//!
//! ```
//! use flac::StreamReader;
//! use std::fs::File;
//!
//! match StreamReader::<File>::from_file("path/to/file.flac") {
//! Ok(mut stream) => {
//! // Copy of `StreamInfo` to help convert to a different audio format.
//! let info = stream.info();
//!
//! for sample in stream.iter::<i16>() {
//! // Iterate over each decoded sample
//! }
//! }
//! Err(error) => println!("{:?}", error),
//! }
//! ```
#[macro_use]
extern crate nom;
#[macro_use]
mod utility;
mod frame;
mod subframe;
pub mod metadata;
pub mod stream;
pub use metadata::Metadata;
pub use stream::{Stream, StreamBuffer, StreamReader};
pub use utility::{
Sample, SampleSize,
StreamProducer, ReadStream, ByteStream,
ErrorKind
};
|
//! An implementation of [FLAC](https://xiph.org/flac), free lossless audio
//! codec, written in Rust.
//!
//! The code is available on [GitHub](https://github.com/sourrust/flac).
//!
//! # Examples
//!
//! Basic decoding from a file.
//!
//! ```
//! use flac::StreamReader;
//! use std::fs::File;
//!
//! match StreamReader::<File>::from_file("path/to/file.flac") {
//! Ok(mut stream) => {
//! // Copy of `StreamInfo` to help convert to a different audio format.
//! let info = stream.info();
//!
//! // The explicit size for `Stream::iter` is the resulting decoded
//! // sample. You can usually find out the desired size of the
//! // samples with `info.bits_per_sample`.
//! for sample in stream.iter::<i16>() {
//! // Iterate over each decoded sample
//! }
//! }
//! Err(error) => println!("{:?}", error),
//! }
//! ```
#[macro_use]
extern crate nom;
#[macro_use]
mod utility;
mod frame;
mod subframe;
pub mod metadata;
pub mod stream;
pub use metadata::Metadata;
pub use stream::{Stream, StreamBuffer, StreamReader};
pub use utility::{
Sample, SampleSize,
StreamProducer, ReadStream, ByteStream,
ErrorKind
};
| 3
| 0
| 1
|
add_only
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -18,2 +18,5 @@
//!
+//! // The explicit size for `Stream::iter` is the resulting decoded
+//! // sample. You can usually find out the desired size of the
+//! // samples with `info.bits_per_sample`.
//! for sample in stream.iter::<i16>() {
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ ... @@
//!
+//! // The explicit size for `Stream::iter` is the resulting decoded
+//! // sample. You can usually find out the desired size of the
+//! // samples with `info.bits_per_sample`.
//! for sample in stream.iter::<i16>() {
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -18,2 +18,5 @@
CON //!
ADD //! // The explicit size for `Stream::iter` is the resulting decoded
ADD //! // sample. You can usually find out the desired size of the
ADD //! // samples with `info.bits_per_sample`.
CON //! for sample in stream.iter::<i16>() {
|
<<<<<<< SEARCH
//! let info = stream.info();
//!
//! for sample in stream.iter::<i16>() {
//! // Iterate over each decoded sample
=======
//! let info = stream.info();
//!
//! // The explicit size for `Stream::iter` is the resulting decoded
//! // sample. You can usually find out the desired size of the
//! // samples with `info.bits_per_sample`.
//! for sample in stream.iter::<i16>() {
//! // Iterate over each decoded sample
>>>>>>> REPLACE
|
tonyli71/designate
|
4a711a2709ec5d8a8e04bb0f735fcfaa319cffdf
|
designate/objects/validation_error.py
|
python
|
apache-2.0
|
Fix the displayed error message in V2 API
Change-Id: I07c3f1ed79fa507dbe9b76eb8f5964475516754c
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = six.text_type(js_error)
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = js_error.message
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
| 1
| 3
| 2
|
mixed
|
--- a/designate/objects/validation_error.py
+++ b/designate/objects/validation_error.py
@@ -13,4 +13,2 @@
# under the License.
-import six
-
from designate.objects import base
@@ -35,3 +33,3 @@
e.path = list(getattr(js_error, 'releative_path', js_error.path))
- e.message = six.text_type(js_error)
+ e.message = js_error.message
e.validator = js_error.validator
|
--- a/designate/objects/validation_error.py
+++ b/designate/objects/validation_error.py
@@ ... @@
# under the License.
-import six
-
from designate.objects import base
@@ ... @@
e.path = list(getattr(js_error, 'releative_path', js_error.path))
- e.message = six.text_type(js_error)
+ e.message = js_error.message
e.validator = js_error.validator
|
--- a/designate/objects/validation_error.py
+++ b/designate/objects/validation_error.py
@@ -13,4 +13,2 @@
CON # under the License.
DEL import six
DEL
CON from designate.objects import base
@@ -35,3 +33,3 @@
CON e.path = list(getattr(js_error, 'releative_path', js_error.path))
DEL e.message = six.text_type(js_error)
ADD e.message = js_error.message
CON e.validator = js_error.validator
|
<<<<<<< SEARCH
# License for the specific language governing permissions and limitations
# under the License.
import six
from designate.objects import base
=======
# License for the specific language governing permissions and limitations
# under the License.
from designate.objects import base
>>>>>>> REPLACE
<<<<<<< SEARCH
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = six.text_type(js_error)
e.validator = js_error.validator
e.validator_value = js_error.validator_value
=======
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = js_error.message
e.validator = js_error.validator
e.validator_value = js_error.validator_value
>>>>>>> REPLACE
|
prasos/bittiraha-walletd
|
2f0d51b524d5ca0e6a769cfcaeb81724c7c1491a
|
src/fi/bittiraha/walletd/WalletAccountManager.java
|
java
|
apache-2.0
|
Make walletextension non-mandatory for now since it's not in use.
|
package fi.bittiraha.walletd;
import org.bitcoinj.core.*;
import org.bitcoinj.kits.WalletAppKit;
import net.minidev.json.*;
import com.google.common.collect.ImmutableList;
import java.util.*;
import java.io.File;
/**
* This class extends WalletAppKit to add ability to tag individual addresses
* with account names to emulate bitcoind's accounts. However, emulation in
* this version is incomplete and only useful in searching for incoming txs.
*/
public class WalletAccountManager extends WalletAppKit {
AccountManager manager;
public WalletAccountManager(NetworkParameters params, File directory, String filePrefix) {
super(params,directory,filePrefix);
manager = new AccountManager();
}
protected class AccountManager extends JSONObject implements WalletExtension {
public void deserializeWalletExtension(Wallet containingWallet, byte[] data) {
Object parsed = JSONValue.parse(data);
if (parsed instanceof JSONObject) {
this.merge((JSONObject)parsed);
}
}
public String getWalletExtensionID() {
return "fi.bittiraha.walletd.WalletAccountManager";
}
public boolean isWalletExtensionMandatory() {
return true;
}
public byte[] serializeWalletExtension() {
return this.toJSONString(JSONStyle.MAX_COMPRESS).getBytes();
}
}
protected List<WalletExtension> provideWalletExtensions() throws Exception {
return ImmutableList.of((WalletExtension)manager);
}
public Map<String,Object> getAccountMap() {
return manager;
}
}
|
package fi.bittiraha.walletd;
import org.bitcoinj.core.*;
import org.bitcoinj.kits.WalletAppKit;
import net.minidev.json.*;
import com.google.common.collect.ImmutableList;
import java.util.*;
import java.io.File;
/**
* This class extends WalletAppKit to add ability to tag individual addresses
* with account names to emulate bitcoind's accounts. However, emulation in
* this version is incomplete and only useful in searching for incoming txs.
*/
public class WalletAccountManager extends WalletAppKit {
AccountManager manager;
public WalletAccountManager(NetworkParameters params, File directory, String filePrefix) {
super(params,directory,filePrefix);
manager = new AccountManager();
}
protected class AccountManager extends JSONObject implements WalletExtension {
public void deserializeWalletExtension(Wallet containingWallet, byte[] data) {
Object parsed = JSONValue.parse(data);
if (parsed instanceof JSONObject) {
this.merge((JSONObject)parsed);
}
}
public String getWalletExtensionID() {
return "fi.bittiraha.walletd.WalletAccountManager";
}
public boolean isWalletExtensionMandatory() {
// FIXME, set this to true when this module actually does something
return false;
}
public byte[] serializeWalletExtension() {
return this.toJSONString(JSONStyle.MAX_COMPRESS).getBytes();
}
}
protected List<WalletExtension> provideWalletExtensions() throws Exception {
return ImmutableList.of((WalletExtension)manager);
}
public Map<String,Object> getAccountMap() {
return manager;
}
}
| 2
| 1
| 1
|
mixed
|
--- a/src/fi/bittiraha/walletd/WalletAccountManager.java
+++ b/src/fi/bittiraha/walletd/WalletAccountManager.java
@@ -33,3 +33,4 @@
public boolean isWalletExtensionMandatory() {
- return true;
+ // FIXME, set this to true when this module actually does something
+ return false;
}
|
--- a/src/fi/bittiraha/walletd/WalletAccountManager.java
+++ b/src/fi/bittiraha/walletd/WalletAccountManager.java
@@ ... @@
public boolean isWalletExtensionMandatory() {
- return true;
+ // FIXME, set this to true when this module actually does something
+ return false;
}
|
--- a/src/fi/bittiraha/walletd/WalletAccountManager.java
+++ b/src/fi/bittiraha/walletd/WalletAccountManager.java
@@ -33,3 +33,4 @@
CON public boolean isWalletExtensionMandatory() {
DEL return true;
ADD // FIXME, set this to true when this module actually does something
ADD return false;
CON }
|
<<<<<<< SEARCH
}
public boolean isWalletExtensionMandatory() {
return true;
}
public byte[] serializeWalletExtension() {
=======
}
public boolean isWalletExtensionMandatory() {
// FIXME, set this to true when this module actually does something
return false;
}
public byte[] serializeWalletExtension() {
>>>>>>> REPLACE
|
azaroth42/iiif-harvester
|
c4103c00b51ddb9cb837d65b43c972505e533bdc
|
tilescraper.py
|
python
|
apache-2.0
|
Add in good practices for crawling
|
from PIL import Image
import json, StringIO, requests
import time
service = "http://dlss-dev-azaroth.stanford.edu/services/iiif/f1rc/"
resp = requests.get(service + "info.json")
js = json.loads(resp.text)
h = js['height']
w = js['width']
img = Image.new("RGB", (w,h), "white")
tilesize = 400
for x in range(w/tilesize+1):
for y in range(h/tilesize+1):
region = "%s,%s,%s,%s" % (x*tilesize, y*tilesize, tilesize, tilesize)
tileresp = requests.get(service + ("/%s/full/0/default.jpg" % region))
tile = Image.open(StringIO.StringIO(tileresp.content))
img.paste(tile, (x*tilesize,y*tilesize))
img.save("full.jpg")
|
from PIL import Image
import json, StringIO, requests
import time
import robotparser
import re
host = "http://dlss-dev-azaroth.stanford.edu/"
service = host + "services/iiif/f1rc/"
resp = requests.get(service + "info.json")
js = json.loads(resp.text)
h = js['height']
w = js['width']
img = Image.new("RGB", (w,h), "white")
## Respect tile dimensions of server
tilesize = 1024
if js.has_key('tiles'):
tilesize = js['tiles']['width']
## Introduce baseline crawl delay
delay = 1
## Parse robots.txt
resp = requests.get(host + "/robots.txt")
if resp.status == 200:
parser = robotparser.RobotFileParser()
parser.parse(resp.text)
okay = parser.can_fetch("*", service)
if not okay:
print "Blocked by robots.txt"
sys.exit()
# No support for Crawl-delay extension ... just search
cd = re.compile("Crawl-delay: ([0-9]+)")
m = cd.search(resp.text)
if m:
delay = int(m.groups()[0])
for x in range(w/tilesize+1):
for y in range(h/tilesize+1):
region = "%s,%s,%s,%s" % (x*tilesize, y*tilesize, tilesize, tilesize)
tileresp = requests.get(service + ("/%s/full/0/default.jpg" % region))
tile = Image.open(StringIO.StringIO(tileresp.content))
img.paste(tile, (x*tilesize,y*tilesize))
sys.stdout.write('.')
sys.stdout.flush()
time.sleep(delay)
img.save("full.jpg")
| 32
| 2
| 3
|
mixed
|
--- a/tilescraper.py
+++ b/tilescraper.py
@@ -3,4 +3,8 @@
import time
+import robotparser
+import re
-service = "http://dlss-dev-azaroth.stanford.edu/services/iiif/f1rc/"
+host = "http://dlss-dev-azaroth.stanford.edu/"
+
+service = host + "services/iiif/f1rc/"
resp = requests.get(service + "info.json")
@@ -10,3 +14,25 @@
img = Image.new("RGB", (w,h), "white")
-tilesize = 400
+
+## Respect tile dimensions of server
+tilesize = 1024
+if js.has_key('tiles'):
+ tilesize = js['tiles']['width']
+
+## Introduce baseline crawl delay
+delay = 1
+
+## Parse robots.txt
+resp = requests.get(host + "/robots.txt")
+if resp.status == 200:
+ parser = robotparser.RobotFileParser()
+ parser.parse(resp.text)
+ okay = parser.can_fetch("*", service)
+ if not okay:
+ print "Blocked by robots.txt"
+ sys.exit()
+ # No support for Crawl-delay extension ... just search
+ cd = re.compile("Crawl-delay: ([0-9]+)")
+ m = cd.search(resp.text)
+ if m:
+ delay = int(m.groups()[0])
@@ -18,2 +44,6 @@
img.paste(tile, (x*tilesize,y*tilesize))
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ time.sleep(delay)
+
img.save("full.jpg")
|
--- a/tilescraper.py
+++ b/tilescraper.py
@@ ... @@
import time
+import robotparser
+import re
-service = "http://dlss-dev-azaroth.stanford.edu/services/iiif/f1rc/"
+host = "http://dlss-dev-azaroth.stanford.edu/"
+
+service = host + "services/iiif/f1rc/"
resp = requests.get(service + "info.json")
@@ ... @@
img = Image.new("RGB", (w,h), "white")
-tilesize = 400
+
+## Respect tile dimensions of server
+tilesize = 1024
+if js.has_key('tiles'):
+ tilesize = js['tiles']['width']
+
+## Introduce baseline crawl delay
+delay = 1
+
+## Parse robots.txt
+resp = requests.get(host + "/robots.txt")
+if resp.status == 200:
+ parser = robotparser.RobotFileParser()
+ parser.parse(resp.text)
+ okay = parser.can_fetch("*", service)
+ if not okay:
+ print "Blocked by robots.txt"
+ sys.exit()
+ # No support for Crawl-delay extension ... just search
+ cd = re.compile("Crawl-delay: ([0-9]+)")
+ m = cd.search(resp.text)
+ if m:
+ delay = int(m.groups()[0])
@@ ... @@
img.paste(tile, (x*tilesize,y*tilesize))
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ time.sleep(delay)
+
img.save("full.jpg")
|
--- a/tilescraper.py
+++ b/tilescraper.py
@@ -3,4 +3,8 @@
CON import time
ADD import robotparser
ADD import re
CON
DEL service = "http://dlss-dev-azaroth.stanford.edu/services/iiif/f1rc/"
ADD host = "http://dlss-dev-azaroth.stanford.edu/"
ADD
ADD service = host + "services/iiif/f1rc/"
CON resp = requests.get(service + "info.json")
@@ -10,3 +14,25 @@
CON img = Image.new("RGB", (w,h), "white")
DEL tilesize = 400
ADD
ADD ## Respect tile dimensions of server
ADD tilesize = 1024
ADD if js.has_key('tiles'):
ADD tilesize = js['tiles']['width']
ADD
ADD ## Introduce baseline crawl delay
ADD delay = 1
ADD
ADD ## Parse robots.txt
ADD resp = requests.get(host + "/robots.txt")
ADD if resp.status == 200:
ADD parser = robotparser.RobotFileParser()
ADD parser.parse(resp.text)
ADD okay = parser.can_fetch("*", service)
ADD if not okay:
ADD print "Blocked by robots.txt"
ADD sys.exit()
ADD # No support for Crawl-delay extension ... just search
ADD cd = re.compile("Crawl-delay: ([0-9]+)")
ADD m = cd.search(resp.text)
ADD if m:
ADD delay = int(m.groups()[0])
CON
@@ -18,2 +44,6 @@
CON img.paste(tile, (x*tilesize,y*tilesize))
ADD sys.stdout.write('.')
ADD sys.stdout.flush()
ADD time.sleep(delay)
ADD
CON img.save("full.jpg")
|
<<<<<<< SEARCH
import json, StringIO, requests
import time
service = "http://dlss-dev-azaroth.stanford.edu/services/iiif/f1rc/"
resp = requests.get(service + "info.json")
js = json.loads(resp.text)
h = js['height']
w = js['width']
img = Image.new("RGB", (w,h), "white")
tilesize = 400
for x in range(w/tilesize+1):
=======
import json, StringIO, requests
import time
import robotparser
import re
host = "http://dlss-dev-azaroth.stanford.edu/"
service = host + "services/iiif/f1rc/"
resp = requests.get(service + "info.json")
js = json.loads(resp.text)
h = js['height']
w = js['width']
img = Image.new("RGB", (w,h), "white")
## Respect tile dimensions of server
tilesize = 1024
if js.has_key('tiles'):
tilesize = js['tiles']['width']
## Introduce baseline crawl delay
delay = 1
## Parse robots.txt
resp = requests.get(host + "/robots.txt")
if resp.status == 200:
parser = robotparser.RobotFileParser()
parser.parse(resp.text)
okay = parser.can_fetch("*", service)
if not okay:
print "Blocked by robots.txt"
sys.exit()
# No support for Crawl-delay extension ... just search
cd = re.compile("Crawl-delay: ([0-9]+)")
m = cd.search(resp.text)
if m:
delay = int(m.groups()[0])
for x in range(w/tilesize+1):
>>>>>>> REPLACE
<<<<<<< SEARCH
tile = Image.open(StringIO.StringIO(tileresp.content))
img.paste(tile, (x*tilesize,y*tilesize))
img.save("full.jpg")
=======
tile = Image.open(StringIO.StringIO(tileresp.content))
img.paste(tile, (x*tilesize,y*tilesize))
sys.stdout.write('.')
sys.stdout.flush()
time.sleep(delay)
img.save("full.jpg")
>>>>>>> REPLACE
|
rlucioni/typesetter
|
f5206fa6cd94758202378b7616e578bd8a3a8dfe
|
tasks.py
|
python
|
mit
|
Use threads to allow simultaneous serving of site and building of assets
|
"""Task functions for use with Invoke."""
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def run(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
|
"""Task functions for use with Invoke."""
from threading import Thread
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def serve(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
@task
def stream(context, host=None):
tasks = [static, serve]
threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
[t.start() for t in threads]
[t.join() for t in threads]
| 13
| 1
| 3
|
mixed
|
--- a/tasks.py
+++ b/tasks.py
@@ -1,2 +1,4 @@
"""Task functions for use with Invoke."""
+from threading import Thread
+
from invoke import task
@@ -24,3 +26,3 @@
@task
-def run(context, host='127.0.0.1', port='5000'):
+def serve(context, host='127.0.0.1', port='5000'):
steps = [
@@ -40 +42,11 @@
context.run(cmd)
+
+
+@task
+def stream(context, host=None):
+ tasks = [static, serve]
+
+ threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
+
+ [t.start() for t in threads]
+ [t.join() for t in threads]
|
--- a/tasks.py
+++ b/tasks.py
@@ ... @@
"""Task functions for use with Invoke."""
+from threading import Thread
+
from invoke import task
@@ ... @@
@task
-def run(context, host='127.0.0.1', port='5000'):
+def serve(context, host='127.0.0.1', port='5000'):
steps = [
@@ ... @@
context.run(cmd)
+
+
+@task
+def stream(context, host=None):
+ tasks = [static, serve]
+
+ threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
+
+ [t.start() for t in threads]
+ [t.join() for t in threads]
|
--- a/tasks.py
+++ b/tasks.py
@@ -1,2 +1,4 @@
CON """Task functions for use with Invoke."""
ADD from threading import Thread
ADD
CON from invoke import task
@@ -24,3 +26,3 @@
CON @task
DEL def run(context, host='127.0.0.1', port='5000'):
ADD def serve(context, host='127.0.0.1', port='5000'):
CON steps = [
@@ -40 +42,11 @@
CON context.run(cmd)
ADD
ADD
ADD @task
ADD def stream(context, host=None):
ADD tasks = [static, serve]
ADD
ADD threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
ADD
ADD [t.start() for t in threads]
ADD [t.join() for t in threads]
|
<<<<<<< SEARCH
"""Task functions for use with Invoke."""
from invoke import task
=======
"""Task functions for use with Invoke."""
from threading import Thread
from invoke import task
>>>>>>> REPLACE
<<<<<<< SEARCH
@task
def run(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
=======
@task
def serve(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
>>>>>>> REPLACE
<<<<<<< SEARCH
context.run(cmd)
=======
context.run(cmd)
@task
def stream(context, host=None):
tasks = [static, serve]
threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
[t.start() for t in threads]
[t.join() for t in threads]
>>>>>>> REPLACE
|
klaseskilson/TNM031-labs
|
2596904080e1367a1d82fce2083a2d5b7f6e04a9
|
lab4/src/SecureElection/SecureElectionClient.java
|
java
|
mit
|
Add SSL setup to client
|
package SecureElection;
/**
* Created by Klas Eskilson on 15-11-16.
*/
public class SecureElectionClient {
public static void main(String[] args) {
}
}
|
package SecureElection;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.KeyStore;
import javax.net.ssl.*;
import SecureElection.Common.Settings;
/**
* Created by Klas Eskilson on 15-11-16.
*/
public class SecureElectionClient {
// constants
private static final String CLIENTTRUSTSTORE = Settings.KEYLOCATION + "ClientTruststore.ks";
private static final String CLIENTKEYSTORE = Settings.KEYLOCATION + "ClientKeystore.ks";
private static final String CLIENTPASSWORD = "somephrase";
// class variables
BufferedReader socketIn;
PrintWriter socketOut;
/**
* setup ssl client
* @param addr the address to connect to
*/
private void setupSSLClient(InetAddress hostAddr) {
try {
// load keystores
KeyStore ks = KeyStore.getInstance("JCEKS");
ks.load(new FileInputStream(CLIENTKEYSTORE),
CLIENTPASSWORD.toCharArray());
KeyStore ts = KeyStore.getInstance("JCEKS");
ts.load(new FileInputStream(CLIENTTRUSTSTORE),
CLIENTPASSWORD.toCharArray());
// setup key managers
KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
kmf.init(ks, CLIENTPASSWORD.toCharArray());
TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
tmf.init(ts);
// setup ssl
SSLContext sslContext = SSLContext.getInstance("TLS");
sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
SSLSocketFactory sslFact = sslContext.getSocketFactory();
SSLSocket client = (SSLSocket) sslFact.createSocket(hostAddr, this.hostPort);
client.setEnabledCipherSuites(client.getSupportedCipherSuites());
// setup transmissions
socketIn = new BufferedReader(new InputStreamReader(client.getInputStream()));
socketOut = new PrintWriter(client.getOutputStream(), true);
} catch (Exception e) {
System.out.println(e);
e.printStackTrace();
}
}
public static void main(String[] args) {
try {
// setup connection
InetAddress localhost = InetAddress.getLocalHost();
setupSSLClient(localhost);
} catch (UnknownHostException uhe) {
System.out.println(uhe);
uhe.printStackTrace();
}
}
}
| 63
| 0
| 2
|
add_only
|
--- a/lab4/src/SecureElection/SecureElectionClient.java
+++ b/lab4/src/SecureElection/SecureElectionClient.java
@@ -1,2 +1,13 @@
package SecureElection;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.security.KeyStore;
+import javax.net.ssl.*;
+import SecureElection.Common.Settings;
+
/**
@@ -6,4 +17,56 @@
public class SecureElectionClient {
+ // constants
+ private static final String CLIENTTRUSTSTORE = Settings.KEYLOCATION + "ClientTruststore.ks";
+ private static final String CLIENTKEYSTORE = Settings.KEYLOCATION + "ClientKeystore.ks";
+ private static final String CLIENTPASSWORD = "somephrase";
+
+ // class variables
+ BufferedReader socketIn;
+ PrintWriter socketOut;
+
+ /**
+ * setup ssl client
+ * @param addr the address to connect to
+ */
+ private void setupSSLClient(InetAddress hostAddr) {
+ try {
+ // load keystores
+ KeyStore ks = KeyStore.getInstance("JCEKS");
+ ks.load(new FileInputStream(CLIENTKEYSTORE),
+ CLIENTPASSWORD.toCharArray());
+ KeyStore ts = KeyStore.getInstance("JCEKS");
+ ts.load(new FileInputStream(CLIENTTRUSTSTORE),
+ CLIENTPASSWORD.toCharArray());
+
+ // setup key managers
+ KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
+ kmf.init(ks, CLIENTPASSWORD.toCharArray());
+ TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
+ tmf.init(ts);
+
+ // setup ssl
+ SSLContext sslContext = SSLContext.getInstance("TLS");
+ sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
+ SSLSocketFactory sslFact = sslContext.getSocketFactory();
+ SSLSocket client = (SSLSocket) sslFact.createSocket(hostAddr, this.hostPort);
+ client.setEnabledCipherSuites(client.getSupportedCipherSuites());
+
+ // setup transmissions
+ socketIn = new BufferedReader(new InputStreamReader(client.getInputStream()));
+ socketOut = new PrintWriter(client.getOutputStream(), true);
+ } catch (Exception e) {
+ System.out.println(e);
+ e.printStackTrace();
+ }
+ }
public static void main(String[] args) {
+ try {
+ // setup connection
+ InetAddress localhost = InetAddress.getLocalHost();
+ setupSSLClient(localhost);
+ } catch (UnknownHostException uhe) {
+ System.out.println(uhe);
+ uhe.printStackTrace();
+ }
|
--- a/lab4/src/SecureElection/SecureElectionClient.java
+++ b/lab4/src/SecureElection/SecureElectionClient.java
@@ ... @@
package SecureElection;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.security.KeyStore;
+import javax.net.ssl.*;
+import SecureElection.Common.Settings;
+
/**
@@ ... @@
public class SecureElectionClient {
+ // constants
+ private static final String CLIENTTRUSTSTORE = Settings.KEYLOCATION + "ClientTruststore.ks";
+ private static final String CLIENTKEYSTORE = Settings.KEYLOCATION + "ClientKeystore.ks";
+ private static final String CLIENTPASSWORD = "somephrase";
+
+ // class variables
+ BufferedReader socketIn;
+ PrintWriter socketOut;
+
+ /**
+ * setup ssl client
+ * @param addr the address to connect to
+ */
+ private void setupSSLClient(InetAddress hostAddr) {
+ try {
+ // load keystores
+ KeyStore ks = KeyStore.getInstance("JCEKS");
+ ks.load(new FileInputStream(CLIENTKEYSTORE),
+ CLIENTPASSWORD.toCharArray());
+ KeyStore ts = KeyStore.getInstance("JCEKS");
+ ts.load(new FileInputStream(CLIENTTRUSTSTORE),
+ CLIENTPASSWORD.toCharArray());
+
+ // setup key managers
+ KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
+ kmf.init(ks, CLIENTPASSWORD.toCharArray());
+ TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
+ tmf.init(ts);
+
+ // setup ssl
+ SSLContext sslContext = SSLContext.getInstance("TLS");
+ sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
+ SSLSocketFactory sslFact = sslContext.getSocketFactory();
+ SSLSocket client = (SSLSocket) sslFact.createSocket(hostAddr, this.hostPort);
+ client.setEnabledCipherSuites(client.getSupportedCipherSuites());
+
+ // setup transmissions
+ socketIn = new BufferedReader(new InputStreamReader(client.getInputStream()));
+ socketOut = new PrintWriter(client.getOutputStream(), true);
+ } catch (Exception e) {
+ System.out.println(e);
+ e.printStackTrace();
+ }
+ }
public static void main(String[] args) {
+ try {
+ // setup connection
+ InetAddress localhost = InetAddress.getLocalHost();
+ setupSSLClient(localhost);
+ } catch (UnknownHostException uhe) {
+ System.out.println(uhe);
+ uhe.printStackTrace();
+ }
|
--- a/lab4/src/SecureElection/SecureElectionClient.java
+++ b/lab4/src/SecureElection/SecureElectionClient.java
@@ -1,2 +1,13 @@
CON package SecureElection;
ADD
ADD import java.io.BufferedReader;
ADD import java.io.FileInputStream;
ADD import java.io.InputStreamReader;
ADD import java.io.PrintWriter;
ADD import java.net.InetAddress;
ADD import java.net.UnknownHostException;
ADD import java.security.KeyStore;
ADD import javax.net.ssl.*;
ADD import SecureElection.Common.Settings;
ADD
CON /**
@@ -6,4 +17,56 @@
CON public class SecureElectionClient {
ADD // constants
ADD private static final String CLIENTTRUSTSTORE = Settings.KEYLOCATION + "ClientTruststore.ks";
ADD private static final String CLIENTKEYSTORE = Settings.KEYLOCATION + "ClientKeystore.ks";
ADD private static final String CLIENTPASSWORD = "somephrase";
ADD
ADD // class variables
ADD BufferedReader socketIn;
ADD PrintWriter socketOut;
ADD
ADD /**
ADD * setup ssl client
ADD * @param addr the address to connect to
ADD */
ADD private void setupSSLClient(InetAddress hostAddr) {
ADD try {
ADD // load keystores
ADD KeyStore ks = KeyStore.getInstance("JCEKS");
ADD ks.load(new FileInputStream(CLIENTKEYSTORE),
ADD CLIENTPASSWORD.toCharArray());
ADD KeyStore ts = KeyStore.getInstance("JCEKS");
ADD ts.load(new FileInputStream(CLIENTTRUSTSTORE),
ADD CLIENTPASSWORD.toCharArray());
ADD
ADD // setup key managers
ADD KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
ADD kmf.init(ks, CLIENTPASSWORD.toCharArray());
ADD TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
ADD tmf.init(ts);
ADD
ADD // setup ssl
ADD SSLContext sslContext = SSLContext.getInstance("TLS");
ADD sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
ADD SSLSocketFactory sslFact = sslContext.getSocketFactory();
ADD SSLSocket client = (SSLSocket) sslFact.createSocket(hostAddr, this.hostPort);
ADD client.setEnabledCipherSuites(client.getSupportedCipherSuites());
ADD
ADD // setup transmissions
ADD socketIn = new BufferedReader(new InputStreamReader(client.getInputStream()));
ADD socketOut = new PrintWriter(client.getOutputStream(), true);
ADD } catch (Exception e) {
ADD System.out.println(e);
ADD e.printStackTrace();
ADD }
ADD }
CON
CON public static void main(String[] args) {
ADD try {
ADD // setup connection
ADD InetAddress localhost = InetAddress.getLocalHost();
ADD setupSSLClient(localhost);
ADD } catch (UnknownHostException uhe) {
ADD System.out.println(uhe);
ADD uhe.printStackTrace();
ADD }
CON
|
<<<<<<< SEARCH
package SecureElection;
/**
* Created by Klas Eskilson on 15-11-16.
*/
public class SecureElectionClient {
public static void main(String[] args) {
}
=======
package SecureElection;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.KeyStore;
import javax.net.ssl.*;
import SecureElection.Common.Settings;
/**
* Created by Klas Eskilson on 15-11-16.
*/
public class SecureElectionClient {
// constants
private static final String CLIENTTRUSTSTORE = Settings.KEYLOCATION + "ClientTruststore.ks";
private static final String CLIENTKEYSTORE = Settings.KEYLOCATION + "ClientKeystore.ks";
private static final String CLIENTPASSWORD = "somephrase";
// class variables
BufferedReader socketIn;
PrintWriter socketOut;
/**
* setup ssl client
* @param addr the address to connect to
*/
private void setupSSLClient(InetAddress hostAddr) {
try {
// load keystores
KeyStore ks = KeyStore.getInstance("JCEKS");
ks.load(new FileInputStream(CLIENTKEYSTORE),
CLIENTPASSWORD.toCharArray());
KeyStore ts = KeyStore.getInstance("JCEKS");
ts.load(new FileInputStream(CLIENTTRUSTSTORE),
CLIENTPASSWORD.toCharArray());
// setup key managers
KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
kmf.init(ks, CLIENTPASSWORD.toCharArray());
TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
tmf.init(ts);
// setup ssl
SSLContext sslContext = SSLContext.getInstance("TLS");
sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
SSLSocketFactory sslFact = sslContext.getSocketFactory();
SSLSocket client = (SSLSocket) sslFact.createSocket(hostAddr, this.hostPort);
client.setEnabledCipherSuites(client.getSupportedCipherSuites());
// setup transmissions
socketIn = new BufferedReader(new InputStreamReader(client.getInputStream()));
socketOut = new PrintWriter(client.getOutputStream(), true);
} catch (Exception e) {
System.out.println(e);
e.printStackTrace();
}
}
public static void main(String[] args) {
try {
// setup connection
InetAddress localhost = InetAddress.getLocalHost();
setupSSLClient(localhost);
} catch (UnknownHostException uhe) {
System.out.println(uhe);
uhe.printStackTrace();
}
}
>>>>>>> REPLACE
|
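The Java client above builds its SSLContext from JCEKS keystore and truststore files through the JSSE APIs. Purely for comparison, a mutually-authenticated TLS client looks roughly like this with Python's ssl module; the certificate file names, host, and port below are placeholders, not values taken from the lab:

```python
import socket
import ssl

context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.load_cert_chain(certfile="client-cert.pem", keyfile="client-key.pem")  # client identity
context.load_verify_locations(cafile="server-ca.pem")  # trust anchor for the server

with socket.create_connection(("127.0.0.1", 8443)) as raw_sock:
    with context.wrap_socket(raw_sock, server_hostname="127.0.0.1") as tls_sock:
        tls_sock.sendall(b"hello over TLS\n")
        print(tls_sock.recv(1024))
```

The key/trust split is the same as in the record: `load_cert_chain` plays the role of the client keystore, `load_verify_locations` the role of the truststore.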
clchiou/garage
|
96fd8b71fd425d251e9cc07e8cc65b4fc040d857
|
samples/nanomsg/hello_world.py
|
python
|
mit
|
Fix message loss issue in samples
|
import os.path
import shutil
import tempfile
import threading
import sys
import nanomsg as nn
def ping(url, event):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
event.wait()
sock.send(b'Hello, World!')
def pong(url, event):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
event.set()
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
def main():
path = tempfile.mkdtemp()
try:
event = threading.Event()
url = 'ipc://' + os.path.join(path, 'reqrep.ipc')
print('Play ping-pong on %s' % url)
threads = [
threading.Thread(target=ping, args=(url, event)),
threading.Thread(target=pong, args=(url, event)),
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
finally:
shutil.rmtree(path)
return 0
if __name__ == '__main__':
sys.exit(main())
|
import threading
import sys
import nanomsg as nn
def ping(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
sock.send(b'Hello, World!')
# Shutdown the endpoint after the other side ack'ed; otherwise
# the message could be lost.
barrier.wait()
def pong(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
barrier.wait()
def main():
barrier = threading.Barrier(2)
url = 'inproc://test'
print('Play ping-pong on %s' % url)
threads = [
threading.Thread(target=ping, args=(url, barrier)),
threading.Thread(target=pong, args=(url, barrier)),
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
return 0
if __name__ == '__main__':
sys.exit(main())
| 17
| 22
| 3
|
mixed
|
--- a/samples/nanomsg/hello_world.py
+++ b/samples/nanomsg/hello_world.py
@@ -1,4 +1 @@
-import os.path
-import shutil
-import tempfile
import threading
@@ -9,13 +6,15 @@
-def ping(url, event):
+def ping(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
- event.wait()
sock.send(b'Hello, World!')
+ # Shutdown the endpoint after the other side ack'ed; otherwise
+ # the message could be lost.
+ barrier.wait()
-def pong(url, event):
+def pong(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
- event.set()
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
+ barrier.wait()
@@ -23,17 +22,13 @@
def main():
- path = tempfile.mkdtemp()
- try:
- event = threading.Event()
- url = 'ipc://' + os.path.join(path, 'reqrep.ipc')
- print('Play ping-pong on %s' % url)
- threads = [
- threading.Thread(target=ping, args=(url, event)),
- threading.Thread(target=pong, args=(url, event)),
- ]
- for thread in threads:
- thread.start()
- for thread in threads:
- thread.join()
- finally:
- shutil.rmtree(path)
+ barrier = threading.Barrier(2)
+ url = 'inproc://test'
+ print('Play ping-pong on %s' % url)
+ threads = [
+ threading.Thread(target=ping, args=(url, barrier)),
+ threading.Thread(target=pong, args=(url, barrier)),
+ ]
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
return 0
|
--- a/samples/nanomsg/hello_world.py
+++ b/samples/nanomsg/hello_world.py
@@ ... @@
-import os.path
-import shutil
-import tempfile
import threading
@@ ... @@
-def ping(url, event):
+def ping(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
- event.wait()
sock.send(b'Hello, World!')
+ # Shutdown the endpoint after the other side ack'ed; otherwise
+ # the message could be lost.
+ barrier.wait()
-def pong(url, event):
+def pong(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
- event.set()
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
+ barrier.wait()
@@ ... @@
def main():
- path = tempfile.mkdtemp()
- try:
- event = threading.Event()
- url = 'ipc://' + os.path.join(path, 'reqrep.ipc')
- print('Play ping-pong on %s' % url)
- threads = [
- threading.Thread(target=ping, args=(url, event)),
- threading.Thread(target=pong, args=(url, event)),
- ]
- for thread in threads:
- thread.start()
- for thread in threads:
- thread.join()
- finally:
- shutil.rmtree(path)
+ barrier = threading.Barrier(2)
+ url = 'inproc://test'
+ print('Play ping-pong on %s' % url)
+ threads = [
+ threading.Thread(target=ping, args=(url, barrier)),
+ threading.Thread(target=pong, args=(url, barrier)),
+ ]
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
return 0
|
--- a/samples/nanomsg/hello_world.py
+++ b/samples/nanomsg/hello_world.py
@@ -1,4 +1 @@
DEL import os.path
DEL import shutil
DEL import tempfile
CON import threading
@@ -9,13 +6,15 @@
CON
DEL def ping(url, event):
ADD def ping(url, barrier):
CON with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
DEL event.wait()
CON sock.send(b'Hello, World!')
ADD # Shutdown the endpoint after the other side ack'ed; otherwise
ADD # the message could be lost.
ADD barrier.wait()
CON
CON
DEL def pong(url, event):
ADD def pong(url, barrier):
CON with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
DEL event.set()
CON message = sock.recv()
CON print(bytes(message.as_memoryview()).decode('ascii'))
ADD barrier.wait()
CON
@@ -23,17 +22,13 @@
CON def main():
DEL path = tempfile.mkdtemp()
DEL try:
DEL event = threading.Event()
DEL url = 'ipc://' + os.path.join(path, 'reqrep.ipc')
DEL print('Play ping-pong on %s' % url)
DEL threads = [
DEL threading.Thread(target=ping, args=(url, event)),
DEL threading.Thread(target=pong, args=(url, event)),
DEL ]
DEL for thread in threads:
DEL thread.start()
DEL for thread in threads:
DEL thread.join()
DEL finally:
DEL shutil.rmtree(path)
ADD barrier = threading.Barrier(2)
ADD url = 'inproc://test'
ADD print('Play ping-pong on %s' % url)
ADD threads = [
ADD threading.Thread(target=ping, args=(url, barrier)),
ADD threading.Thread(target=pong, args=(url, barrier)),
ADD ]
ADD for thread in threads:
ADD thread.start()
ADD for thread in threads:
ADD thread.join()
CON return 0
|
<<<<<<< SEARCH
import os.path
import shutil
import tempfile
import threading
import sys
=======
import threading
import sys
>>>>>>> REPLACE
<<<<<<< SEARCH
def ping(url, event):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
event.wait()
sock.send(b'Hello, World!')
def pong(url, event):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
event.set()
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
def main():
path = tempfile.mkdtemp()
try:
event = threading.Event()
url = 'ipc://' + os.path.join(path, 'reqrep.ipc')
print('Play ping-pong on %s' % url)
threads = [
threading.Thread(target=ping, args=(url, event)),
threading.Thread(target=pong, args=(url, event)),
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
finally:
shutil.rmtree(path)
return 0
=======
def ping(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
sock.send(b'Hello, World!')
# Shutdown the endpoint after the other side ack'ed; otherwise
# the message could be lost.
barrier.wait()
def pong(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
barrier.wait()
def main():
barrier = threading.Barrier(2)
url = 'inproc://test'
print('Play ping-pong on %s' % url)
threads = [
threading.Thread(target=ping, args=(url, barrier)),
threading.Thread(target=pong, args=(url, barrier)),
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
return 0
>>>>>>> REPLACE
|
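The fix above swaps the Event for a two-party Barrier so the PUSH endpoint stays open until the PULL side has actually read the message. The same handshake, with a plain in-process queue standing in for the nanomsg socket pair, can be sketched as:

```python
import queue
import threading

barrier = threading.Barrier(2)
channel = queue.Queue()  # stands in for the PUSH/PULL socket pair

def ping():
    channel.put(b"Hello, World!")
    # Keep the sender alive until the receiver has ack'ed via the barrier;
    # tearing it down earlier is how the original sample lost messages.
    barrier.wait()

def pong():
    message = channel.get()
    print(message.decode("ascii"))
    barrier.wait()

threads = [threading.Thread(target=f) for f in (ping, pong)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```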
aaronkaplan/intelmq
|
648c7fb94f92e8ef722af8c9462c9ff65bf643fc
|
intelmq/bots/collectors/mail/collector_mail_body.py
|
python
|
agpl-3.0
|
Insert date when email was received
Sometimes we receive email reports like "this is happening right now" with no date/time included, so if we process emails only once per hour we have no information about the event time. An additional field `extra.email_received` in the mail body collector would help.
|
# -*- coding: utf-8 -*-
"""
Uses the common mail iteration method from the lib file.
"""
from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
|
# -*- coding: utf-8 -*-
"""
Uses the common mail iteration method from the lib file.
"""
from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
report["extra.email_received"] = message.date
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
| 1
| 0
| 1
|
add_only
|
--- a/intelmq/bots/collectors/mail/collector_mail_body.py
+++ b/intelmq/bots/collectors/mail/collector_mail_body.py
@@ -31,2 +31,3 @@
report["extra.email_message_id"] = message.message_id
+ report["extra.email_received"] = message.date
|
--- a/intelmq/bots/collectors/mail/collector_mail_body.py
+++ b/intelmq/bots/collectors/mail/collector_mail_body.py
@@ ... @@
report["extra.email_message_id"] = message.message_id
+ report["extra.email_received"] = message.date
|
--- a/intelmq/bots/collectors/mail/collector_mail_body.py
+++ b/intelmq/bots/collectors/mail/collector_mail_body.py
@@ -31,2 +31,3 @@
CON report["extra.email_message_id"] = message.message_id
ADD report["extra.email_received"] = message.date
CON
|
<<<<<<< SEARCH
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
self.send_message(report)
=======
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
report["extra.email_received"] = message.date
self.send_message(report)
>>>>>>> REPLACE
|
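The one-line change above copies the mail library's `message.date` into `extra.email_received` so downstream parsers have a time reference even when the body contains none. Outside of IntelMQ, turning a raw `Date:` header into a usable timestamp can be sketched with the standard library; the header value and report keys below are illustrative only:

```python
import email.utils

raw_date = "Tue, 01 Oct 2019 12:34:56 +0200"  # typical Date: header value

report = {
    "raw": "this is happening right now",
    "extra.email_received": email.utils.parsedate_to_datetime(raw_date),
}
print(report["extra.email_received"].isoformat())  # 2019-10-01T12:34:56+02:00
```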
CyclopsMC/CyclopsCore
|
6946b0951e03821c0ca8a9380b45b7905a999488
|
src/main/java/org/cyclops/cyclopscore/metadata/RegistryExportableItemTranslationKeys.java
|
java
|
mit
|
Fix incorrect translation key metadata export
|
package org.cyclops.cyclopscore.metadata;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.registries.ForgeRegistries;
/**
* Item translation key exporter.
*/
public class RegistryExportableItemTranslationKeys implements IRegistryExportable {
@Override
public JsonObject export() {
JsonObject element = new JsonObject();
JsonArray elements = new JsonArray();
element.add("items", elements);
for (ResourceLocation key : ForgeRegistries.ITEMS.getKeys()) {
Item value = ForgeRegistries.ITEMS.getValue(key);
ItemStack itemStack = new ItemStack(value);
String translationKey = itemStack.getTranslationKey();
if (!translationKey.endsWith(".name")) {
translationKey += ".name";
}
JsonObject object = new JsonObject();
object.addProperty("translationKey", translationKey);
object.add("item", IRegistryExportable.serializeItemStack(itemStack));
elements.add(object);
}
return element;
}
@Override
public String getName() {
return "item_translation_keys";
}
}
|
package org.cyclops.cyclopscore.metadata;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.registries.ForgeRegistries;
/**
* Item translation key exporter.
*/
public class RegistryExportableItemTranslationKeys implements IRegistryExportable {
@Override
public JsonObject export() {
JsonObject element = new JsonObject();
JsonArray elements = new JsonArray();
element.add("items", elements);
for (ResourceLocation key : ForgeRegistries.ITEMS.getKeys()) {
Item value = ForgeRegistries.ITEMS.getValue(key);
ItemStack itemStack = new ItemStack(value);
String translationKey = itemStack.getTranslationKey();
JsonObject object = new JsonObject();
object.addProperty("translationKey", translationKey);
object.add("item", IRegistryExportable.serializeItemStack(itemStack));
elements.add(object);
}
return element;
}
@Override
public String getName() {
return "item_translation_keys";
}
}
| 0
| 3
| 1
|
del_only
|
--- a/src/main/java/org/cyclops/cyclopscore/metadata/RegistryExportableItemTranslationKeys.java
+++ b/src/main/java/org/cyclops/cyclopscore/metadata/RegistryExportableItemTranslationKeys.java
@@ -24,5 +24,2 @@
String translationKey = itemStack.getTranslationKey();
- if (!translationKey.endsWith(".name")) {
- translationKey += ".name";
- }
|
--- a/src/main/java/org/cyclops/cyclopscore/metadata/RegistryExportableItemTranslationKeys.java
+++ b/src/main/java/org/cyclops/cyclopscore/metadata/RegistryExportableItemTranslationKeys.java
@@ ... @@
String translationKey = itemStack.getTranslationKey();
- if (!translationKey.endsWith(".name")) {
- translationKey += ".name";
- }
|
--- a/src/main/java/org/cyclops/cyclopscore/metadata/RegistryExportableItemTranslationKeys.java
+++ b/src/main/java/org/cyclops/cyclopscore/metadata/RegistryExportableItemTranslationKeys.java
@@ -24,5 +24,2 @@
CON String translationKey = itemStack.getTranslationKey();
DEL if (!translationKey.endsWith(".name")) {
DEL translationKey += ".name";
DEL }
CON
|
<<<<<<< SEARCH
ItemStack itemStack = new ItemStack(value);
String translationKey = itemStack.getTranslationKey();
if (!translationKey.endsWith(".name")) {
translationKey += ".name";
}
JsonObject object = new JsonObject();
=======
ItemStack itemStack = new ItemStack(value);
String translationKey = itemStack.getTranslationKey();
JsonObject object = new JsonObject();
>>>>>>> REPLACE
|
vishwesh3/zulip-mobile
|
d093fea151e187e5e5c9014c6bfd54d6ce7f96f8
|
src/topics/TopicList.js
|
javascript
|
apache-2.0
|
ui: Add keyboardShouldPersistTaps to Topic List screen
Now that we have a search field and potentially a keyboard popped up, make sure to process the first tap on the topic list.
|
/* @flow */
import React, { PureComponent } from 'react';
import { FlatList, StyleSheet } from 'react-native';
import type { Topic } from '../types';
import TopicItem from '../streams/TopicItem';
import { LoadingIndicator, SectionSeparatorBetween, SearchEmptyState } from '../common';
const styles = StyleSheet.create({
list: {
flex: 1,
flexDirection: 'column',
},
});
type Props = {
topics: ?(Topic[]),
onPress: (stream: string, topic: string) => void,
};
export default class TopicList extends PureComponent<Props> {
props: Props;
static defaultProps = {
showDescriptions: false,
showSwitch: false,
selected: false,
streams: [],
};
render() {
const { topics, onPress } = this.props;
if (!topics) {
return <LoadingIndicator size={40} />;
}
if (topics.length === 0) {
return <SearchEmptyState text="No topics found" />;
}
return (
<FlatList
style={styles.list}
data={topics}
keyExtractor={item => item.name}
renderItem={({ item }) => (
<TopicItem name={item.name} isMuted={false} unreadCount={0} onPress={onPress} />
)}
SectionSeparatorComponent={SectionSeparatorBetween}
/>
);
}
}
|
/* @flow */
import React, { PureComponent } from 'react';
import { FlatList, StyleSheet } from 'react-native';
import type { Topic } from '../types';
import TopicItem from '../streams/TopicItem';
import { LoadingIndicator, SectionSeparatorBetween, SearchEmptyState } from '../common';
const styles = StyleSheet.create({
list: {
flex: 1,
flexDirection: 'column',
},
});
type Props = {
topics: ?(Topic[]),
onPress: (stream: string, topic: string) => void,
};
export default class TopicList extends PureComponent<Props> {
props: Props;
static defaultProps = {
showDescriptions: false,
showSwitch: false,
selected: false,
streams: [],
};
render() {
const { topics, onPress } = this.props;
if (!topics) {
return <LoadingIndicator size={40} />;
}
if (topics.length === 0) {
return <SearchEmptyState text="No topics found" />;
}
return (
<FlatList
keyboardShouldPersistTaps="always"
style={styles.list}
data={topics}
keyExtractor={item => item.name}
renderItem={({ item }) => (
<TopicItem name={item.name} isMuted={false} unreadCount={0} onPress={onPress} />
)}
SectionSeparatorComponent={SectionSeparatorBetween}
/>
);
}
}
| 1
| 0
| 1
|
add_only
|
--- a/src/topics/TopicList.js
+++ b/src/topics/TopicList.js
@@ -43,2 +43,3 @@
<FlatList
+ keyboardShouldPersistTaps="always"
style={styles.list}
|
--- a/src/topics/TopicList.js
+++ b/src/topics/TopicList.js
@@ ... @@
<FlatList
+ keyboardShouldPersistTaps="always"
style={styles.list}
|
--- a/src/topics/TopicList.js
+++ b/src/topics/TopicList.js
@@ -43,2 +43,3 @@
CON <FlatList
ADD keyboardShouldPersistTaps="always"
CON style={styles.list}
|
<<<<<<< SEARCH
return (
<FlatList
style={styles.list}
data={topics}
=======
return (
<FlatList
keyboardShouldPersistTaps="always"
style={styles.list}
data={topics}
>>>>>>> REPLACE
|
redox-os/sodium
|
ef920b64c53e66de9d303398b1ab4a667a390dbd
|
parse.rs
|
rust
|
mit
|
Fix issues with unicode control chars
|
use super::*;
use redox::*;
/// Get the next instruction
// TODO: Should this be an iterator instead?
pub fn next_inst(editor: &mut Editor) -> Inst {
let mut n = 0;
loop {
if let EventOption::Key(k) = editor.window.poll().unwrap_or(Event::new()).to_option() {
if k.pressed {
let c = k.character;
match editor.cursor().mode {
Mode::Primitive(_) => {
return Inst(0, c);
},
Mode::Command(_) => {
n = match c {
'0' if n != 0 => n * 10,
'1' => n * 10 + 1,
'2' => n * 10 + 2,
'3' => n * 10 + 3,
'4' => n * 10 + 4,
'5' => n * 10 + 5,
'6' => n * 10 + 6,
'7' => n * 10 + 7,
'8' => n * 10 + 8,
'9' => n * 10 + 9,
_ => {
return Inst(if n == 0 { 1 } else { n }, c);
}
}
}
}
}
}
}
unreachable!()
}
|
use super::*;
use redox::*;
/// Get the next instruction
// TODO: Should this be an iterator instead?
pub fn next_inst(editor: &mut Editor) -> Inst {
let mut n = 0;
let mut shifted = false;
// TODO: Make the switch to normal mode shift more well-coded.
loop {
if let EventOption::Key(k) = editor.window.poll().unwrap_or(Event::new()).to_option() {
let c = k.character;
if shifted && c == '\u{000E}' {
editor.cursor_mut().mode = Mode::Command(CommandMode::Normal);
} else {
if k.pressed {
if c == '\u{000E}' {
shifted = true;
} else {
shifted = false;
match editor.cursor().mode {
Mode::Primitive(_) => {
return Inst(0, c);
},
Mode::Command(_) => {
n = match c {
'0' if n != 0 => n * 10,
'1' => n * 10 + 1,
'2' => n * 10 + 2,
'3' => n * 10 + 3,
'4' => n * 10 + 4,
'5' => n * 10 + 5,
'6' => n * 10 + 6,
'7' => n * 10 + 7,
'8' => n * 10 + 8,
'9' => n * 10 + 9,
_ => {
return Inst(if n == 0 { 1 } else { n }, c);
}
}
}
}
}
}
}
}
}
unreachable!()
}
| 31
| 20
| 1
|
mixed
|
--- a/parse.rs
+++ b/parse.rs
@@ -7,26 +7,37 @@
let mut n = 0;
+ let mut shifted = false;
+ // TODO: Make the switch to normal mode shift more well-coded.
loop {
if let EventOption::Key(k) = editor.window.poll().unwrap_or(Event::new()).to_option() {
- if k.pressed {
- let c = k.character;
- match editor.cursor().mode {
- Mode::Primitive(_) => {
- return Inst(0, c);
- },
- Mode::Command(_) => {
- n = match c {
- '0' if n != 0 => n * 10,
- '1' => n * 10 + 1,
- '2' => n * 10 + 2,
- '3' => n * 10 + 3,
- '4' => n * 10 + 4,
- '5' => n * 10 + 5,
- '6' => n * 10 + 6,
- '7' => n * 10 + 7,
- '8' => n * 10 + 8,
- '9' => n * 10 + 9,
- _ => {
+ let c = k.character;
+ if shifted && c == '\u{000E}' {
+ editor.cursor_mut().mode = Mode::Command(CommandMode::Normal);
+ } else {
+ if k.pressed {
+ if c == '\u{000E}' {
+ shifted = true;
+ } else {
+ shifted = false;
+ match editor.cursor().mode {
+ Mode::Primitive(_) => {
+ return Inst(0, c);
+ },
+ Mode::Command(_) => {
+ n = match c {
+ '0' if n != 0 => n * 10,
+ '1' => n * 10 + 1,
+ '2' => n * 10 + 2,
+ '3' => n * 10 + 3,
+ '4' => n * 10 + 4,
+ '5' => n * 10 + 5,
+ '6' => n * 10 + 6,
+ '7' => n * 10 + 7,
+ '8' => n * 10 + 8,
+ '9' => n * 10 + 9,
+ _ => {
- return Inst(if n == 0 { 1 } else { n }, c);
+ return Inst(if n == 0 { 1 } else { n }, c);
+ }
+ }
}
|
--- a/parse.rs
+++ b/parse.rs
@@ ... @@
let mut n = 0;
+ let mut shifted = false;
+ // TODO: Make the switch to normal mode shift more well-coded.
loop {
if let EventOption::Key(k) = editor.window.poll().unwrap_or(Event::new()).to_option() {
- if k.pressed {
- let c = k.character;
- match editor.cursor().mode {
- Mode::Primitive(_) => {
- return Inst(0, c);
- },
- Mode::Command(_) => {
- n = match c {
- '0' if n != 0 => n * 10,
- '1' => n * 10 + 1,
- '2' => n * 10 + 2,
- '3' => n * 10 + 3,
- '4' => n * 10 + 4,
- '5' => n * 10 + 5,
- '6' => n * 10 + 6,
- '7' => n * 10 + 7,
- '8' => n * 10 + 8,
- '9' => n * 10 + 9,
- _ => {
+ let c = k.character;
+ if shifted && c == '\u{000E}' {
+ editor.cursor_mut().mode = Mode::Command(CommandMode::Normal);
+ } else {
+ if k.pressed {
+ if c == '\u{000E}' {
+ shifted = true;
+ } else {
+ shifted = false;
+ match editor.cursor().mode {
+ Mode::Primitive(_) => {
+ return Inst(0, c);
+ },
+ Mode::Command(_) => {
+ n = match c {
+ '0' if n != 0 => n * 10,
+ '1' => n * 10 + 1,
+ '2' => n * 10 + 2,
+ '3' => n * 10 + 3,
+ '4' => n * 10 + 4,
+ '5' => n * 10 + 5,
+ '6' => n * 10 + 6,
+ '7' => n * 10 + 7,
+ '8' => n * 10 + 8,
+ '9' => n * 10 + 9,
+ _ => {
- return Inst(if n == 0 { 1 } else { n }, c);
+ return Inst(if n == 0 { 1 } else { n }, c);
+ }
+ }
}
|
--- a/parse.rs
+++ b/parse.rs
@@ -7,26 +7,37 @@
CON let mut n = 0;
ADD let mut shifted = false;
CON
ADD // TODO: Make the switch to normal mode shift more well-coded.
CON loop {
CON if let EventOption::Key(k) = editor.window.poll().unwrap_or(Event::new()).to_option() {
DEL if k.pressed {
DEL let c = k.character;
DEL match editor.cursor().mode {
DEL Mode::Primitive(_) => {
DEL return Inst(0, c);
DEL },
DEL Mode::Command(_) => {
DEL n = match c {
DEL '0' if n != 0 => n * 10,
DEL '1' => n * 10 + 1,
DEL '2' => n * 10 + 2,
DEL '3' => n * 10 + 3,
DEL '4' => n * 10 + 4,
DEL '5' => n * 10 + 5,
DEL '6' => n * 10 + 6,
DEL '7' => n * 10 + 7,
DEL '8' => n * 10 + 8,
DEL '9' => n * 10 + 9,
DEL _ => {
ADD let c = k.character;
ADD if shifted && c == '\u{000E}' {
ADD editor.cursor_mut().mode = Mode::Command(CommandMode::Normal);
ADD } else {
ADD if k.pressed {
ADD if c == '\u{000E}' {
ADD shifted = true;
ADD } else {
ADD shifted = false;
ADD match editor.cursor().mode {
ADD Mode::Primitive(_) => {
ADD return Inst(0, c);
ADD },
ADD Mode::Command(_) => {
ADD n = match c {
ADD '0' if n != 0 => n * 10,
ADD '1' => n * 10 + 1,
ADD '2' => n * 10 + 2,
ADD '3' => n * 10 + 3,
ADD '4' => n * 10 + 4,
ADD '5' => n * 10 + 5,
ADD '6' => n * 10 + 6,
ADD '7' => n * 10 + 7,
ADD '8' => n * 10 + 8,
ADD '9' => n * 10 + 9,
ADD _ => {
CON
DEL return Inst(if n == 0 { 1 } else { n }, c);
ADD return Inst(if n == 0 { 1 } else { n }, c);
ADD }
ADD }
CON }
|
<<<<<<< SEARCH
pub fn next_inst(editor: &mut Editor) -> Inst {
let mut n = 0;
loop {
if let EventOption::Key(k) = editor.window.poll().unwrap_or(Event::new()).to_option() {
if k.pressed {
let c = k.character;
match editor.cursor().mode {
Mode::Primitive(_) => {
return Inst(0, c);
},
Mode::Command(_) => {
n = match c {
'0' if n != 0 => n * 10,
'1' => n * 10 + 1,
'2' => n * 10 + 2,
'3' => n * 10 + 3,
'4' => n * 10 + 4,
'5' => n * 10 + 5,
'6' => n * 10 + 6,
'7' => n * 10 + 7,
'8' => n * 10 + 8,
'9' => n * 10 + 9,
_ => {
return Inst(if n == 0 { 1 } else { n }, c);
}
}
=======
pub fn next_inst(editor: &mut Editor) -> Inst {
let mut n = 0;
let mut shifted = false;
// TODO: Make the switch to normal mode shift more well-coded.
loop {
if let EventOption::Key(k) = editor.window.poll().unwrap_or(Event::new()).to_option() {
let c = k.character;
if shifted && c == '\u{000E}' {
editor.cursor_mut().mode = Mode::Command(CommandMode::Normal);
} else {
if k.pressed {
if c == '\u{000E}' {
shifted = true;
} else {
shifted = false;
match editor.cursor().mode {
Mode::Primitive(_) => {
return Inst(0, c);
},
Mode::Command(_) => {
n = match c {
'0' if n != 0 => n * 10,
'1' => n * 10 + 1,
'2' => n * 10 + 2,
'3' => n * 10 + 3,
'4' => n * 10 + 4,
'5' => n * 10 + 5,
'6' => n * 10 + 6,
'7' => n * 10 + 7,
'8' => n * 10 + 8,
'9' => n * 10 + 9,
_ => {
return Inst(if n == 0 { 1 } else { n }, c);
}
}
}
}
>>>>>>> REPLACE
|
ktorio/ktor
|
5cfc7caf2183b0446ea3c9e4bbe9042c69d653ff
|
ktor-samples/ktor-samples-gson/src/io/ktor/samples/gson/GsonApplication.kt
|
kotlin
|
apache-2.0
|
Add DSL config to the gson-sample
|
package io.ktor.samples.gson
import io.ktor.application.*
import io.ktor.features.*
import io.ktor.gson.*
import io.ktor.http.*
import io.ktor.response.*
import io.ktor.routing.*
data class Model(val name: String, val items: List<Item>)
data class Item(val key: String, val value: String)
/*
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1
{"name":"root","items":[{"key":"A","value":"Apache"},{"key":"B","value":"Bing"}]}
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1/item/A
{"key":"A","value":"Apache"}
*/
fun Application.main() {
install(DefaultHeaders)
install(Compression)
install(CallLogging)
install(ContentNegotiation) {
register(ContentType.Application.Json, GsonConverter())
}
val model = Model("root", listOf(Item("A", "Apache"), Item("B", "Bing")))
routing {
get("/v1") {
call.respond(model)
}
get("/v1/item/{key}") {
val item = model.items.firstOrNull { it.key == call.parameters["key"] }
if (item == null)
call.respond(HttpStatusCode.NotFound)
else
call.respond(item)
}
}
}
|
package io.ktor.samples.gson
import io.ktor.application.*
import io.ktor.features.*
import io.ktor.gson.*
import io.ktor.http.*
import io.ktor.response.*
import io.ktor.routing.*
import java.text.*
data class Model(val name: String, val items: List<Item>)
data class Item(val key: String, val value: String)
/*
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1
{"name":"root","items":[{"key":"A","value":"Apache"},{"key":"B","value":"Bing"}]}
The result is pretty printed, to show off how to configure gson, but it is
possible to use the default gson as well
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1/item/A
{"key":"A","value":"Apache"}
*/
fun Application.main() {
install(DefaultHeaders)
install(Compression)
install(CallLogging)
install(ContentNegotiation) {
gson {
setDateFormat(DateFormat.LONG)
setPrettyPrinting()
}
}
val model = Model("root", listOf(Item("A", "Apache"), Item("B", "Bing")))
routing {
get("/v1") {
call.respond(model)
}
get("/v1/item/{key}") {
val item = model.items.firstOrNull { it.key == call.parameters["key"] }
if (item == null)
call.respond(HttpStatusCode.NotFound)
else
call.respond(item)
}
}
}
| 7
| 2
| 3
|
mixed
|
--- a/ktor-samples/ktor-samples-gson/src/io/ktor/samples/gson/GsonApplication.kt
+++ b/ktor-samples/ktor-samples-gson/src/io/ktor/samples/gson/GsonApplication.kt
@@ -8,2 +8,3 @@
import io.ktor.routing.*
+import java.text.*
@@ -15,2 +16,4 @@
{"name":"root","items":[{"key":"A","value":"Apache"},{"key":"B","value":"Bing"}]}
+ The result is pretty printed, to show off how to configure gson, but it is
+ possible to use the default gson as well
@@ -25,5 +28,7 @@
install(ContentNegotiation) {
- register(ContentType.Application.Json, GsonConverter())
+ gson {
+ setDateFormat(DateFormat.LONG)
+ setPrettyPrinting()
+ }
}
-
val model = Model("root", listOf(Item("A", "Apache"), Item("B", "Bing")))
|
--- a/ktor-samples/ktor-samples-gson/src/io/ktor/samples/gson/GsonApplication.kt
+++ b/ktor-samples/ktor-samples-gson/src/io/ktor/samples/gson/GsonApplication.kt
@@ ... @@
import io.ktor.routing.*
+import java.text.*
@@ ... @@
{"name":"root","items":[{"key":"A","value":"Apache"},{"key":"B","value":"Bing"}]}
+ The result is pretty printed, to show off how to configure gson, but it is
+ possible to use the default gson as well
@@ ... @@
install(ContentNegotiation) {
- register(ContentType.Application.Json, GsonConverter())
+ gson {
+ setDateFormat(DateFormat.LONG)
+ setPrettyPrinting()
+ }
}
-
val model = Model("root", listOf(Item("A", "Apache"), Item("B", "Bing")))
|
--- a/ktor-samples/ktor-samples-gson/src/io/ktor/samples/gson/GsonApplication.kt
+++ b/ktor-samples/ktor-samples-gson/src/io/ktor/samples/gson/GsonApplication.kt
@@ -8,2 +8,3 @@
CON import io.ktor.routing.*
ADD import java.text.*
CON
@@ -15,2 +16,4 @@
CON {"name":"root","items":[{"key":"A","value":"Apache"},{"key":"B","value":"Bing"}]}
ADD The result is pretty printed, to show off how to configure gson, but it is
ADD possible to use the default gson as well
CON
@@ -25,5 +28,7 @@
CON install(ContentNegotiation) {
DEL register(ContentType.Application.Json, GsonConverter())
ADD gson {
ADD setDateFormat(DateFormat.LONG)
ADD setPrettyPrinting()
ADD }
CON }
DEL
CON val model = Model("root", listOf(Item("A", "Apache"), Item("B", "Bing")))
|
<<<<<<< SEARCH
import io.ktor.response.*
import io.ktor.routing.*
data class Model(val name: String, val items: List<Item>)
=======
import io.ktor.response.*
import io.ktor.routing.*
import java.text.*
data class Model(val name: String, val items: List<Item>)
>>>>>>> REPLACE
<<<<<<< SEARCH
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1
{"name":"root","items":[{"key":"A","value":"Apache"},{"key":"B","value":"Bing"}]}
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1/item/A
=======
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1
{"name":"root","items":[{"key":"A","value":"Apache"},{"key":"B","value":"Bing"}]}
The result is pretty printed, to show off how to configure gson, but it is
possible to use the default gson as well
> curl -v --compress --header "Accept: application/gson" http://localhost:8080/v1/item/A
>>>>>>> REPLACE
<<<<<<< SEARCH
install(CallLogging)
install(ContentNegotiation) {
register(ContentType.Application.Json, GsonConverter())
}
val model = Model("root", listOf(Item("A", "Apache"), Item("B", "Bing")))
routing {
=======
install(CallLogging)
install(ContentNegotiation) {
gson {
setDateFormat(DateFormat.LONG)
setPrettyPrinting()
}
}
val model = Model("root", listOf(Item("A", "Apache"), Item("B", "Bing")))
routing {
>>>>>>> REPLACE
|
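The gson DSL block above configures the shared serializer once (pretty printing plus a long date format), and every response then goes through it. The same idea expressed with Python's json module, purely as a cross-language illustration:

```python
import json
from datetime import date

model = {"name": "root", "items": [{"key": "A", "value": "Apache"}, {"key": "B", "value": "Bing"}]}

def to_pretty_json(obj):
    # indent=2 plays the role of setPrettyPrinting(); the default= hook formats
    # dates roughly the way setDateFormat(DateFormat.LONG) would.
    return json.dumps(obj, indent=2, default=lambda d: d.strftime("%B %d, %Y"))

print(to_pretty_json({"model": model, "generated": date(2018, 1, 1)}))
```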
monokrome/django-drift
|
c12f040fe9b0bbc3e47aed8f942de04216251f51
|
importer/loaders.py
|
python
|
mit
|
Allow configuration of extensions for types.
|
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
excel_extensions = [
'.xls',
'.xlsx',
]
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
return os.path.splitext(file_info.path)[-1] in excel_extensions
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
|
from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
| 11
| 5
| 3
|
mixed
|
--- a/importer/loaders.py
+++ b/importer/loaders.py
@@ -1 +1,3 @@
+from django.conf import settings
+
import xlrd
@@ -7,6 +9,9 @@
-excel_extensions = [
- '.xls',
- '.xlsx',
-]
+extensions = getattr(
+ settings,
+ 'IMPORTER_EXTENSIONS',
+ {
+ 'excel': ('.xls', '.xlsx'),
+ }
+)
@@ -52,3 +57,4 @@
# TODO: Find a way to really sniff the file.
- return os.path.splitext(file_info.path)[-1] in excel_extensions
+ if not 'excel' in extensions: return False
+ return os.path.splitext(file_info.path)[-1] in extensions['excel']
|
--- a/importer/loaders.py
+++ b/importer/loaders.py
@@ ... @@
+from django.conf import settings
+
import xlrd
@@ ... @@
-excel_extensions = [
- '.xls',
- '.xlsx',
-]
+extensions = getattr(
+ settings,
+ 'IMPORTER_EXTENSIONS',
+ {
+ 'excel': ('.xls', '.xlsx'),
+ }
+)
@@ ... @@
# TODO: Find a way to really sniff the file.
- return os.path.splitext(file_info.path)[-1] in excel_extensions
+ if not 'excel' in extensions: return False
+ return os.path.splitext(file_info.path)[-1] in extensions['excel']
|
--- a/importer/loaders.py
+++ b/importer/loaders.py
@@ -1 +1,3 @@
ADD from django.conf import settings
ADD
CON import xlrd
@@ -7,6 +9,9 @@
CON
DEL excel_extensions = [
DEL '.xls',
DEL '.xlsx',
DEL ]
ADD extensions = getattr(
ADD settings,
ADD 'IMPORTER_EXTENSIONS',
ADD {
ADD 'excel': ('.xls', '.xlsx'),
ADD }
ADD )
CON
@@ -52,3 +57,4 @@
CON # TODO: Find a way to really sniff the file.
DEL return os.path.splitext(file_info.path)[-1] in excel_extensions
ADD if not 'excel' in extensions: return False
ADD return os.path.splitext(file_info.path)[-1] in extensions['excel']
CON
|
<<<<<<< SEARCH
import xlrd
import os
=======
from django.conf import settings
import xlrd
import os
>>>>>>> REPLACE
<<<<<<< SEARCH
excel_extensions = [
'.xls',
'.xlsx',
]
=======
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
>>>>>>> REPLACE
<<<<<<< SEARCH
# TODO: Find a way to really sniff the file.
return os.path.splitext(file_info.path)[-1] in excel_extensions
=======
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
>>>>>>> REPLACE
|
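The loader change above reads the extension map from `settings.IMPORTER_EXTENSIONS` with a hard-coded fallback, so deployments can add or restrict file types without touching the code. A self-contained sketch of the same `getattr(settings, ..., default)` pattern, using a SimpleNamespace to stand in for `django.conf.settings` and a made-up 'csv' entry for illustration:

```python
import os
from types import SimpleNamespace

# Stand-in for django.conf.settings; a real project would define
# IMPORTER_EXTENSIONS in its settings module instead.
settings = SimpleNamespace(IMPORTER_EXTENSIONS={
    "excel": (".xls", ".xlsx"),
    "csv": (".csv",),  # hypothetical extra type, not part of the original commit
})

extensions = getattr(settings, "IMPORTER_EXTENSIONS", {"excel": (".xls", ".xlsx")})

def sniff_excel(path):
    # Mirrors ExcelLoader.sniff: no configured excel extensions means "not ours".
    if "excel" not in extensions:
        return False
    return os.path.splitext(path)[-1] in extensions["excel"]

print(sniff_excel("report.xlsx"))  # True
print(sniff_excel("report.txt"))   # False
```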
stallmanifold/rust-multiboot2
|
4abe342074bcacf7c9e6433846b07b7533bde6d2
|
src/elf_symbols.rs
|
rust
|
apache-2.0
|
Add basic support for ELF section parsing.
|
// Elf section headers have a fixed size.
const ELF_SECTION_HEADER_SIZE: usize = 56;
#[repr(packed)]
struct ElfSymbolTag {
tag_type: u32,
size: u32,
num: u16,
entsize: u16,
shndx: u16,
reserved: u16,
first_section: ElfSectionHeader
}
struct ElfSectionHeader {
sh_name: u32,
sh_type: u32,
sh_flags: u64,
sh_addr: u64,
sh_offset: u64,
sh_size: u64,
sh_link: u32,
sh_info: u32,
sh_addralign: u64,
sh_entsize: u64
}
struct ElfSectionIter {
current_section: &'static ElfSectionHeader,
final_section: &'static ElfSectionHeader,
remaining_sections: u32,
entry_size: u32
}
|
#[repr(packed)]
struct ElfSymbolTag {
tag_type: u32,
size: u32,
num: u16,
entsize: u16,
shndx: u16,
reserved: u16,
first_section: ElfSectionHeader
}
impl ElfSymbolTag {
pub fn elf_sections(&'static self) -> ElfSectionIter {
ElfSectionIter {
current_section: &self.first_section,
section_index: 0,
total_sections: self.num as u32,
entry_size: self.entsize as u64
}
}
}
#[repr(C)]
struct ElfSectionHeader {
sh_name: u32,
sh_type: u32,
sh_flags: u64,
sh_addr: u64,
sh_offset: u64,
sh_size: u64,
sh_link: u32,
sh_info: u32,
sh_addralign: u64,
sh_entsize: u64
}
struct ElfSectionIter {
current_section: &'static ElfSectionHeader,
section_index: u32,
total_sections: u32,
entry_size: u64
}
impl Iterator for ElfSectionIter {
type Item = &'static ElfSectionHeader;
fn next(&mut self) -> Option<Self::Item> {
if self.section_index >= self.total_sections {
return None;
} else {
let section = self.current_section;
let next_section_addr =
(self.current_section as *const ElfSectionHeader as u64) + self.entry_size;
let next_section = unsafe {
&*(next_section_addr as *const ElfSectionHeader)
};
self.current_section = next_section;
self.section_index += 1;
Some(section)
}
}
}
| 37
| 8
| 3
|
mixed
|
--- a/src/elf_symbols.rs
+++ b/src/elf_symbols.rs
@@ -1,6 +1 @@
-
-
-// Elf section headers have a fixed size.
-const ELF_SECTION_HEADER_SIZE: usize = 56;
-
#[repr(packed)]
@@ -16,2 +11,14 @@
+impl ElfSymbolTag {
+ pub fn elf_sections(&'static self) -> ElfSectionIter {
+ ElfSectionIter {
+ current_section: &self.first_section,
+ section_index: 0,
+ total_sections: self.num as u32,
+ entry_size: self.entsize as u64
+ }
+ }
+}
+
+#[repr(C)]
struct ElfSectionHeader {
@@ -31,5 +38,27 @@
current_section: &'static ElfSectionHeader,
- final_section: &'static ElfSectionHeader,
- remaining_sections: u32,
- entry_size: u32
+ section_index: u32,
+ total_sections: u32,
+ entry_size: u64
}
+
+impl Iterator for ElfSectionIter {
+ type Item = &'static ElfSectionHeader;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.section_index >= self.total_sections {
+ return None;
+ } else {
+ let section = self.current_section;
+ let next_section_addr =
+ (self.current_section as *const ElfSectionHeader as u64) + self.entry_size;
+ let next_section = unsafe {
+ &*(next_section_addr as *const ElfSectionHeader)
+ };
+
+ self.current_section = next_section;
+ self.section_index += 1;
+
+ Some(section)
+ }
+ }
+}
|
--- a/src/elf_symbols.rs
+++ b/src/elf_symbols.rs
@@ ... @@
-
-
-// Elf section headers have a fixed size.
-const ELF_SECTION_HEADER_SIZE: usize = 56;
-
#[repr(packed)]
@@ ... @@
+impl ElfSymbolTag {
+ pub fn elf_sections(&'static self) -> ElfSectionIter {
+ ElfSectionIter {
+ current_section: &self.first_section,
+ section_index: 0,
+ total_sections: self.num as u32,
+ entry_size: self.entsize as u64
+ }
+ }
+}
+
+#[repr(C)]
struct ElfSectionHeader {
@@ ... @@
current_section: &'static ElfSectionHeader,
- final_section: &'static ElfSectionHeader,
- remaining_sections: u32,
- entry_size: u32
+ section_index: u32,
+ total_sections: u32,
+ entry_size: u64
}
+
+impl Iterator for ElfSectionIter {
+ type Item = &'static ElfSectionHeader;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.section_index >= self.total_sections {
+ return None;
+ } else {
+ let section = self.current_section;
+ let next_section_addr =
+ (self.current_section as *const ElfSectionHeader as u64) + self.entry_size;
+ let next_section = unsafe {
+ &*(next_section_addr as *const ElfSectionHeader)
+ };
+
+ self.current_section = next_section;
+ self.section_index += 1;
+
+ Some(section)
+ }
+ }
+}
|
--- a/src/elf_symbols.rs
+++ b/src/elf_symbols.rs
@@ -1,6 +1 @@
DEL
DEL
DEL // Elf section headers have a fixed size.
DEL const ELF_SECTION_HEADER_SIZE: usize = 56;
DEL
CON #[repr(packed)]
@@ -16,2 +11,14 @@
CON
ADD impl ElfSymbolTag {
ADD pub fn elf_sections(&'static self) -> ElfSectionIter {
ADD ElfSectionIter {
ADD current_section: &self.first_section,
ADD section_index: 0,
ADD total_sections: self.num as u32,
ADD entry_size: self.entsize as u64
ADD }
ADD }
ADD }
ADD
ADD #[repr(C)]
CON struct ElfSectionHeader {
@@ -31,5 +38,27 @@
CON current_section: &'static ElfSectionHeader,
DEL final_section: &'static ElfSectionHeader,
DEL remaining_sections: u32,
DEL entry_size: u32
ADD section_index: u32,
ADD total_sections: u32,
ADD entry_size: u64
CON }
ADD
ADD impl Iterator for ElfSectionIter {
ADD type Item = &'static ElfSectionHeader;
ADD
ADD fn next(&mut self) -> Option<Self::Item> {
ADD if self.section_index >= self.total_sections {
ADD return None;
ADD } else {
ADD let section = self.current_section;
ADD let next_section_addr =
ADD (self.current_section as *const ElfSectionHeader as u64) + self.entry_size;
ADD let next_section = unsafe {
ADD &*(next_section_addr as *const ElfSectionHeader)
ADD };
ADD
ADD self.current_section = next_section;
ADD self.section_index += 1;
ADD
ADD Some(section)
ADD }
ADD }
ADD }
|
<<<<<<< SEARCH
// Elf section headers have a fixed size.
const ELF_SECTION_HEADER_SIZE: usize = 56;
#[repr(packed)]
struct ElfSymbolTag {
=======
#[repr(packed)]
struct ElfSymbolTag {
>>>>>>> REPLACE
<<<<<<< SEARCH
}
struct ElfSectionHeader {
sh_name: u32,
=======
}
impl ElfSymbolTag {
pub fn elf_sections(&'static self) -> ElfSectionIter {
ElfSectionIter {
current_section: &self.first_section,
section_index: 0,
total_sections: self.num as u32,
entry_size: self.entsize as u64
}
}
}
#[repr(C)]
struct ElfSectionHeader {
sh_name: u32,
>>>>>>> REPLACE
<<<<<<< SEARCH
struct ElfSectionIter {
current_section: &'static ElfSectionHeader,
final_section: &'static ElfSectionHeader,
remaining_sections: u32,
entry_size: u32
}
=======
struct ElfSectionIter {
current_section: &'static ElfSectionHeader,
section_index: u32,
total_sections: u32,
entry_size: u64
}
impl Iterator for ElfSectionIter {
type Item = &'static ElfSectionHeader;
fn next(&mut self) -> Option<Self::Item> {
if self.section_index >= self.total_sections {
return None;
} else {
let section = self.current_section;
let next_section_addr =
(self.current_section as *const ElfSectionHeader as u64) + self.entry_size;
let next_section = unsafe {
&*(next_section_addr as *const ElfSectionHeader)
};
self.current_section = next_section;
self.section_index += 1;
Some(section)
}
}
}
>>>>>>> REPLACE
|
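The Rust iterator above walks a packed array of ELF section headers by advancing a raw pointer `entry_size` bytes per step and stopping after `total_sections` entries. The same stride-and-count idea over a byte buffer, shown in Python with a deliberately simplified two-field "header" layout rather than the real 64-byte ELF section header:

```python
import struct

HEADER_FMT = "<II"                        # toy layout: two u32 fields per header
entry_size = struct.calcsize(HEADER_FMT)  # stride between consecutive headers

buf = b"".join(struct.pack(HEADER_FMT, i, i * 100) for i in (1, 2, 3))

def sections(buf, total_sections, entry_size):
    for index in range(total_sections):
        # the offset plays the role of the raw-pointer arithmetic in ElfSectionIter
        yield struct.unpack_from(HEADER_FMT, buf, index * entry_size)

for sh_name, sh_size in sections(buf, total_sections=3, entry_size=entry_size):
    print(sh_name, sh_size)
```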
yunity/yunity-core
|
dc461956408ffa35e2391fccf4231d60144985f7
|
yunity/groups/api.py
|
python
|
agpl-3.0
|
Fix permissions for groups endpoint
|
from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
permission_classes = (IsAuthenticatedOrReadOnly,)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
|
from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class IsMember(BasePermission):
message = 'You are not a member.'
def has_object_permission(self, request, view, obj):
return request.user in obj.members.all()
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
def get_permissions(self):
if self.action in ('update', 'partial_update', 'destroy'):
self.permission_classes = (IsMember,)
else:
self.permission_classes = (IsAuthenticatedOrReadOnly,)
return super().get_permissions()
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
| 16
| 2
| 3
|
mixed
|
--- a/yunity/groups/api.py
+++ b/yunity/groups/api.py
@@ -3,3 +3,3 @@
from rest_framework.decorators import detail_route
-from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
+from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
@@ -7,2 +7,9 @@
from yunity.groups.models import Group as GroupModel
+
+
+class IsMember(BasePermission):
+ message = 'You are not a member.'
+
+ def has_object_permission(self, request, view, obj):
+ return request.user in obj.members.all()
@@ -15,3 +22,10 @@
search_fields = ('name', 'description')
- permission_classes = (IsAuthenticatedOrReadOnly,)
+
+ def get_permissions(self):
+ if self.action in ('update', 'partial_update', 'destroy'):
+ self.permission_classes = (IsMember,)
+ else:
+ self.permission_classes = (IsAuthenticatedOrReadOnly,)
+
+ return super().get_permissions()
|
--- a/yunity/groups/api.py
+++ b/yunity/groups/api.py
@@ ... @@
from rest_framework.decorators import detail_route
-from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
+from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
@@ ... @@
from yunity.groups.models import Group as GroupModel
+
+
+class IsMember(BasePermission):
+ message = 'You are not a member.'
+
+ def has_object_permission(self, request, view, obj):
+ return request.user in obj.members.all()
@@ ... @@
search_fields = ('name', 'description')
- permission_classes = (IsAuthenticatedOrReadOnly,)
+
+ def get_permissions(self):
+ if self.action in ('update', 'partial_update', 'destroy'):
+ self.permission_classes = (IsMember,)
+ else:
+ self.permission_classes = (IsAuthenticatedOrReadOnly,)
+
+ return super().get_permissions()
|
--- a/yunity/groups/api.py
+++ b/yunity/groups/api.py
@@ -3,3 +3,3 @@
CON from rest_framework.decorators import detail_route
DEL from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
ADD from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
CON from rest_framework.response import Response
@@ -7,2 +7,9 @@
CON from yunity.groups.models import Group as GroupModel
ADD
ADD
ADD class IsMember(BasePermission):
ADD message = 'You are not a member.'
ADD
ADD def has_object_permission(self, request, view, obj):
ADD return request.user in obj.members.all()
CON
@@ -15,3 +22,10 @@
CON search_fields = ('name', 'description')
DEL permission_classes = (IsAuthenticatedOrReadOnly,)
ADD
ADD def get_permissions(self):
ADD if self.action in ('update', 'partial_update', 'destroy'):
ADD self.permission_classes = (IsMember,)
ADD else:
ADD self.permission_classes = (IsAuthenticatedOrReadOnly,)
ADD
ADD return super().get_permissions()
CON
|
<<<<<<< SEARCH
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
=======
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class IsMember(BasePermission):
message = 'You are not a member.'
def has_object_permission(self, request, view, obj):
return request.user in obj.members.all()
>>>>>>> REPLACE
<<<<<<< SEARCH
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
permission_classes = (IsAuthenticatedOrReadOnly,)
@detail_route(methods=['POST', 'GET'],
=======
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
def get_permissions(self):
if self.action in ('update', 'partial_update', 'destroy'):
self.permission_classes = (IsMember,)
else:
self.permission_classes = (IsAuthenticatedOrReadOnly,)
return super().get_permissions()
@detail_route(methods=['POST', 'GET'],
>>>>>>> REPLACE
|
hch12907/wiz
|
7c8fa41625fd17c73876420e300e7c787cb056f6
|
src/wiz/extraction.rs
|
rust
|
bsd-3-clause
|
Use match instead of unwrap
|
extern crate tar;
extern crate flate2;
use std::fs::File;
use std::io::{BufReader, Read, Write};
use std::path::Path;
use self::tar::Archive;
use self::flate2::read::GzDecoder;
fn extract_tar(input: &Path, output: &Path) {
let file = File::open(input).unwrap();
let mut archive = Archive::new(file);
archive.unpack(output).unwrap();
}
fn extract_gz(input: &Path, output: &Path) {
let file = File::open(input).unwrap();
let buffer = BufReader::new(file);
let archive = GzDecoder::new(buffer).unwrap();
let mut target = File::create(output).unwrap();
for byte in archive.bytes() {
target.write(&[byte.unwrap()]);
}
}
|
extern crate tar;
extern crate flate2;
use std::fs::File;
use std::io::{BufReader, Read, Write};
use std::path::Path;
use self::tar::Archive;
use self::flate2::read::GzDecoder;
fn extract_tar(input: &Path, output: &Path) {
let file = match File::open(input)
{
Ok(x) => x,
Err(why) => panic!("An error occured. \n{}", why),
};
let mut archive = Archive::new(file);
archive.unpack(output).unwrap();
}
fn extract_gz(input: &Path, output: &Path) {
let file = match File::open(input)
{
Ok(x) => x,
Err(why) => panic!("An error occured. \n{}", why),
};
let archive = match GzDecoder::new(BufReader::new(file))
{
Ok(x) => x,
Err(why) => panic!("An error occured. \n{}", why),
};
let mut target = File::create(output).unwrap();
for byte in archive.bytes() {
target.write(&[byte.unwrap()]);
}
}
| 17
| 4
| 2
|
mixed
|
--- a/src/wiz/extraction.rs
+++ b/src/wiz/extraction.rs
@@ -11,3 +11,8 @@
fn extract_tar(input: &Path, output: &Path) {
- let file = File::open(input).unwrap();
+ let file = match File::open(input)
+ {
+ Ok(x) => x,
+ Err(why) => panic!("An error occured. \n{}", why),
+ };
+
let mut archive = Archive::new(file);
@@ -17,5 +22,13 @@
fn extract_gz(input: &Path, output: &Path) {
- let file = File::open(input).unwrap();
- let buffer = BufReader::new(file);
- let archive = GzDecoder::new(buffer).unwrap();
+ let file = match File::open(input)
+ {
+ Ok(x) => x,
+ Err(why) => panic!("An error occured. \n{}", why),
+ };
+
+ let archive = match GzDecoder::new(BufReader::new(file))
+ {
+ Ok(x) => x,
+ Err(why) => panic!("An error occured. \n{}", why),
+ };
|
--- a/src/wiz/extraction.rs
+++ b/src/wiz/extraction.rs
@@ ... @@
fn extract_tar(input: &Path, output: &Path) {
- let file = File::open(input).unwrap();
+ let file = match File::open(input)
+ {
+ Ok(x) => x,
+ Err(why) => panic!("An error occured. \n{}", why),
+ };
+
let mut archive = Archive::new(file);
@@ ... @@
fn extract_gz(input: &Path, output: &Path) {
- let file = File::open(input).unwrap();
- let buffer = BufReader::new(file);
- let archive = GzDecoder::new(buffer).unwrap();
+ let file = match File::open(input)
+ {
+ Ok(x) => x,
+ Err(why) => panic!("An error occured. \n{}", why),
+ };
+
+ let archive = match GzDecoder::new(BufReader::new(file))
+ {
+ Ok(x) => x,
+ Err(why) => panic!("An error occured. \n{}", why),
+ };
|
--- a/src/wiz/extraction.rs
+++ b/src/wiz/extraction.rs
@@ -11,3 +11,8 @@
CON fn extract_tar(input: &Path, output: &Path) {
DEL let file = File::open(input).unwrap();
ADD let file = match File::open(input)
ADD {
ADD Ok(x) => x,
ADD Err(why) => panic!("An error occured. \n{}", why),
ADD };
ADD
CON let mut archive = Archive::new(file);
@@ -17,5 +22,13 @@
CON fn extract_gz(input: &Path, output: &Path) {
DEL let file = File::open(input).unwrap();
DEL let buffer = BufReader::new(file);
DEL let archive = GzDecoder::new(buffer).unwrap();
ADD let file = match File::open(input)
ADD {
ADD Ok(x) => x,
ADD Err(why) => panic!("An error occured. \n{}", why),
ADD };
ADD
ADD let archive = match GzDecoder::new(BufReader::new(file))
ADD {
ADD Ok(x) => x,
ADD Err(why) => panic!("An error occured. \n{}", why),
ADD };
CON
|
<<<<<<< SEARCH
fn extract_tar(input: &Path, output: &Path) {
let file = File::open(input).unwrap();
let mut archive = Archive::new(file);
archive.unpack(output).unwrap();
}
fn extract_gz(input: &Path, output: &Path) {
let file = File::open(input).unwrap();
let buffer = BufReader::new(file);
let archive = GzDecoder::new(buffer).unwrap();
let mut target = File::create(output).unwrap();
=======
fn extract_tar(input: &Path, output: &Path) {
let file = match File::open(input)
{
Ok(x) => x,
Err(why) => panic!("An error occured. \n{}", why),
};
let mut archive = Archive::new(file);
archive.unpack(output).unwrap();
}
fn extract_gz(input: &Path, output: &Path) {
let file = match File::open(input)
{
Ok(x) => x,
Err(why) => panic!("An error occured. \n{}", why),
};
let archive = match GzDecoder::new(BufReader::new(file))
{
Ok(x) => x,
Err(why) => panic!("An error occured. \n{}", why),
};
let mut target = File::create(output).unwrap();
>>>>>>> REPLACE
|
getguesstimate/guesstimate-app
|
15481c310e35027f011f541e7ae87fd089987fdf
|
src/components/calculators/input.js
|
javascript
|
mit
|
Add delay to field focus to ensure it will work
|
import React, {Component} from 'react'
import Icon from 'react-fa'
import {EditorState, Editor, ContentState} from 'draft-js'
export class Input extends Component{
state = {editorState: EditorState.createWithContent(ContentState.createFromText(''))}
componentDidMount() {
if (this.props.isFirst) {
// this.refs.editor.focus()
window.thiswillwork = this.refs.editor
}
}
onChange(editorState) {
this.props.onChange(editorState.getCurrentContent().getPlainText(''))
return this.setState({editorState})
}
hasValidContent() {
const content = this.state.editorState.getCurrentContent().getPlainText('')
return !_.isEmpty(content) && _.isEmpty(this.props.errors)
}
render () {
const {name, errors} = this.props
return (
<div className='input'>
<div className='row'>
<div className='col-md-7'><div className='name'>{name}</div></div>
<div className='col-md-5'>
<div className='editor'>
<Editor
ref='editor'
editorState={this.state.editorState}
onChange={this.onChange.bind(this)}
handleReturn={() => true}
/>
{!_.isEmpty(errors) && <div className='error-alert'><Icon name='warning' /></div>}
</div>
</div>
</div>
</div>
)
}
}
|
import React, {Component} from 'react'
import Icon from 'react-fa'
import {EditorState, Editor, ContentState} from 'draft-js'
export class Input extends Component{
state = {editorState: EditorState.createWithContent(ContentState.createFromText(''))}
componentDidMount() {
if (this.props.isFirst) {
setTimeout(() => {this.refs.editor.focus()}, 1)
}
}
onChange(editorState) {
this.props.onChange(editorState.getCurrentContent().getPlainText(''))
return this.setState({editorState})
}
hasValidContent() {
const content = this.state.editorState.getCurrentContent().getPlainText('')
return !_.isEmpty(content) && _.isEmpty(this.props.errors)
}
render () {
const {name, errors} = this.props
return (
<div className='input'>
<div className='row'>
<div className='col-md-7'><div className='name'>{name}</div></div>
<div className='col-md-5'>
<div className='editor'>
<Editor
ref='editor'
editorState={this.state.editorState}
onChange={this.onChange.bind(this)}
handleReturn={() => true}
/>
{!_.isEmpty(errors) && <div className='error-alert'><Icon name='warning' /></div>}
</div>
</div>
</div>
</div>
)
}
}
| 1
| 2
| 1
|
mixed
|
--- a/src/components/calculators/input.js
+++ b/src/components/calculators/input.js
@@ -11,4 +11,3 @@
if (this.props.isFirst) {
- // this.refs.editor.focus()
- window.thiswillwork = this.refs.editor
+ setTimeout(() => {this.refs.editor.focus()}, 1)
}
|
--- a/src/components/calculators/input.js
+++ b/src/components/calculators/input.js
@@ ... @@
if (this.props.isFirst) {
- // this.refs.editor.focus()
- window.thiswillwork = this.refs.editor
+ setTimeout(() => {this.refs.editor.focus()}, 1)
}
|
--- a/src/components/calculators/input.js
+++ b/src/components/calculators/input.js
@@ -11,4 +11,3 @@
CON if (this.props.isFirst) {
DEL // this.refs.editor.focus()
DEL window.thiswillwork = this.refs.editor
ADD setTimeout(() => {this.refs.editor.focus()}, 1)
CON }
|
<<<<<<< SEARCH
componentDidMount() {
if (this.props.isFirst) {
// this.refs.editor.focus()
window.thiswillwork = this.refs.editor
}
}
=======
componentDidMount() {
if (this.props.isFirst) {
setTimeout(() => {this.refs.editor.focus()}, 1)
}
}
>>>>>>> REPLACE
|
YACOWS/opps
|
5e1daf36d604ee1898e8486458013e63010d6888
|
opps/api/models.py
|
python
|
mit
|
Add missing translations on API model
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
import hmac
from django.db import models
from django.conf import settings
from django.contrib.auth import get_user_model
try:
from hashlib import sha1
except ImportError:
import sha
sha1 = sha.sha
User = get_user_model()
class ApiKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
key = models.CharField(u"Key", max_length=255)
date_insert = models.DateTimeField(u"Date insert", auto_now_add=True)
def __unicode__(self):
return u"{} for {}".format(self.key, self.user)
def save(self, *args, **kwargs):
if not self.key:
self.key = self.generate_key()
return super(ApiKey, self).save(*args, **kwargs)
def generate_key(self):
new_uuid = uuid.uuid4()
return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest()
def create_api_key(sender, **kwargs):
if kwargs.get('created') is True:
ApiKey.objects.create(user=kwargs.get('instance'))
models.signals.post_save.connect(create_api_key, User)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
import hmac
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import get_user_model
try:
from hashlib import sha1
except ImportError:
import sha
sha1 = sha.sha
User = get_user_model()
class ApiKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL,
verbose_name=_(u"User"))
key = models.CharField(_(u"Key"), max_length=255)
date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True)
def __unicode__(self):
return u"{} for {}".format(self.key, self.user)
def save(self, *args, **kwargs):
if not self.key:
self.key = self.generate_key()
return super(ApiKey, self).save(*args, **kwargs)
def generate_key(self):
new_uuid = uuid.uuid4()
return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest()
class Meta:
verbose_name = _(u"API Key")
verbose_name_plural = _(u"API Keys")
def create_api_key(sender, **kwargs):
if kwargs.get('created') is True:
ApiKey.objects.create(user=kwargs.get('instance'))
models.signals.post_save.connect(create_api_key, User)
| 9
| 3
| 3
|
mixed
|
--- a/opps/api/models.py
+++ b/opps/api/models.py
@@ -7,2 +7,3 @@
from django.conf import settings
+from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import get_user_model
@@ -20,5 +21,6 @@
class ApiKey(models.Model):
- user = models.ForeignKey(settings.AUTH_USER_MODEL)
- key = models.CharField(u"Key", max_length=255)
- date_insert = models.DateTimeField(u"Date insert", auto_now_add=True)
+ user = models.ForeignKey(settings.AUTH_USER_MODEL,
+ verbose_name=_(u"User"))
+ key = models.CharField(_(u"Key"), max_length=255)
+ date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True)
@@ -36,2 +38,6 @@
+ class Meta:
+ verbose_name = _(u"API Key")
+ verbose_name_plural = _(u"API Keys")
+
|
--- a/opps/api/models.py
+++ b/opps/api/models.py
@@ ... @@
from django.conf import settings
+from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import get_user_model
@@ ... @@
class ApiKey(models.Model):
- user = models.ForeignKey(settings.AUTH_USER_MODEL)
- key = models.CharField(u"Key", max_length=255)
- date_insert = models.DateTimeField(u"Date insert", auto_now_add=True)
+ user = models.ForeignKey(settings.AUTH_USER_MODEL,
+ verbose_name=_(u"User"))
+ key = models.CharField(_(u"Key"), max_length=255)
+ date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True)
@@ ... @@
+ class Meta:
+ verbose_name = _(u"API Key")
+ verbose_name_plural = _(u"API Keys")
+
|
--- a/opps/api/models.py
+++ b/opps/api/models.py
@@ -7,2 +7,3 @@
CON from django.conf import settings
ADD from django.utils.translation import ugettext_lazy as _
CON from django.contrib.auth import get_user_model
@@ -20,5 +21,6 @@
CON class ApiKey(models.Model):
DEL user = models.ForeignKey(settings.AUTH_USER_MODEL)
DEL key = models.CharField(u"Key", max_length=255)
DEL date_insert = models.DateTimeField(u"Date insert", auto_now_add=True)
ADD user = models.ForeignKey(settings.AUTH_USER_MODEL,
ADD verbose_name=_(u"User"))
ADD key = models.CharField(_(u"Key"), max_length=255)
ADD date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True)
CON
@@ -36,2 +38,6 @@
CON
ADD class Meta:
ADD verbose_name = _(u"API Key")
ADD verbose_name_plural = _(u"API Keys")
ADD
CON
|
<<<<<<< SEARCH
from django.db import models
from django.conf import settings
from django.contrib.auth import get_user_model
=======
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import get_user_model
>>>>>>> REPLACE
<<<<<<< SEARCH
class ApiKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
key = models.CharField(u"Key", max_length=255)
date_insert = models.DateTimeField(u"Date insert", auto_now_add=True)
def __unicode__(self):
=======
class ApiKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL,
verbose_name=_(u"User"))
key = models.CharField(_(u"Key"), max_length=255)
date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True)
def __unicode__(self):
>>>>>>> REPLACE
<<<<<<< SEARCH
return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest()
def create_api_key(sender, **kwargs):
=======
return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest()
class Meta:
verbose_name = _(u"API Key")
verbose_name_plural = _(u"API Keys")
def create_api_key(sender, **kwargs):
>>>>>>> REPLACE
|
sgade/hgots-node
|
0b38c9c37b57ab6c857b85a7ce6dfc0911f917e0
|
src/web/routes/api/v1/users.js
|
javascript
|
mit
|
Set content type of api response to JSON.
|
var helpers = require('./helpers');
var db = helpers.db;
/* /users */
exports.getAllUsers = function(req, res) {
helpers.getRequestingUser(req, function(err, user) {
if ( err ) {
res.status(500).end();
console.log("getAllUsers:", err);
} else {
if ( !user ) {
res.status(403).end();
} else {
// TODO filter for type
helpers.getAllUsers(function(err, users) {
if ( err ) {
res.status(500).end();
} else {
res.end(JSON.stringify(users));
}
});
}
}
});
};
/* /user/:id/cards */
exports.getCardsOfUser = function(req, res) {
helpers.getRequestingUser(req, function(err, user) {
if ( err ) {
res.status(500).end();
} else {
if ( !user ) {
res.status(403).end();
} else {
// TODO filter for type
var id = req.params.id;
helpers.getUser({
id: id
}, function(err, user) {
if ( user ) {
user.getCards().success(function(cards) {
if ( cards ) {
res.end(JSON.stringify(cards));
} else {
res.status(500).end();
}
});
}
});
}
}
});
};
|
var helpers = require('./helpers');
var db = helpers.db;
/* /users */
exports.getAllUsers = function(req, res) {
helpers.getRequestingUser(req, function(err, user) {
if ( err ) {
res.status(500).end();
console.log("getAllUsers:", err);
} else {
if ( !user ) {
res.status(403).end();
} else {
// TODO filter for type
helpers.getAllUsers(function(err, users) {
if ( err ) {
res.status(500).end();
} else {
res.set('Content-Type', 'application/json');
res.end(JSON.stringify(users));
}
});
}
}
});
};
/* /user/:id/cards */
exports.getCardsOfUser = function(req, res) {
helpers.getRequestingUser(req, function(err, user) {
if ( err ) {
res.status(500).end();
} else {
if ( !user ) {
res.status(403).end();
} else {
// TODO filter for type
var id = req.params.id;
helpers.getUser({
id: id
}, function(err, user) {
if ( user ) {
user.getCards().success(function(cards) {
if ( cards ) {
res.set('Content-Type', 'application/json');
res.end(JSON.stringify(cards));
} else {
res.status(500).end();
}
});
}
});
}
}
});
};
| 2
| 0
| 2
|
add_only
|
--- a/src/web/routes/api/v1/users.js
+++ b/src/web/routes/api/v1/users.js
@@ -22,2 +22,3 @@
+ res.set('Content-Type', 'application/json');
res.end(JSON.stringify(users));
@@ -56,2 +57,3 @@
if ( cards ) {
+ res.set('Content-Type', 'application/json');
res.end(JSON.stringify(cards));
|
--- a/src/web/routes/api/v1/users.js
+++ b/src/web/routes/api/v1/users.js
@@ ... @@
+ res.set('Content-Type', 'application/json');
res.end(JSON.stringify(users));
@@ ... @@
if ( cards ) {
+ res.set('Content-Type', 'application/json');
res.end(JSON.stringify(cards));
|
--- a/src/web/routes/api/v1/users.js
+++ b/src/web/routes/api/v1/users.js
@@ -22,2 +22,3 @@
CON
ADD res.set('Content-Type', 'application/json');
CON res.end(JSON.stringify(users));
@@ -56,2 +57,3 @@
CON if ( cards ) {
ADD res.set('Content-Type', 'application/json');
CON res.end(JSON.stringify(cards));
|
<<<<<<< SEARCH
} else {
res.end(JSON.stringify(users));
=======
} else {
res.set('Content-Type', 'application/json');
res.end(JSON.stringify(users));
>>>>>>> REPLACE
<<<<<<< SEARCH
if ( cards ) {
res.end(JSON.stringify(cards));
} else {
=======
if ( cards ) {
res.set('Content-Type', 'application/json');
res.end(JSON.stringify(cards));
} else {
>>>>>>> REPLACE
|
google/site-kit-wp
|
efbf3086d5db81bfeaaf386d862a76a7b45e3020
|
assets/js/googlesitekit-activation.js
|
javascript
|
apache-2.0
|
Send plugin_activated event on activation.
|
/**
* Activation component.
*
* This JavaScript loads on every admin page. Reserved for later.
*
* Site Kit by Google, Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* WordPress dependencies
*/
import domReady from '@wordpress/dom-ready';
import { render } from '@wordpress/element';
import { doAction } from '@wordpress/hooks';
/**
* External dependencies
*/
import { loadTranslations } from 'GoogleUtil';
import 'GoogleComponents/notifications';
/**
* Internal dependencies
*/
import { ActivationApp } from './components/activation/activation-app';
domReady( () => {
const renderTarget = document.getElementById( 'js-googlesitekit-activation' );
if ( renderTarget ) {
loadTranslations();
render( <ActivationApp />, renderTarget );
/**
* Action triggered when the ActivationApp is loaded.
*/
doAction( 'googlesitekit.moduleLoaded', 'Activation' );
}
} );
|
/**
* Activation component.
*
* This JavaScript loads on every admin page. Reserved for later.
*
* Site Kit by Google, Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* WordPress dependencies
*/
import domReady from '@wordpress/dom-ready';
import { render } from '@wordpress/element';
import { doAction } from '@wordpress/hooks';
/**
* External dependencies
*/
import { loadTranslations, sendAnalyticsTrackingEvent } from 'GoogleUtil';
import 'GoogleComponents/notifications';
/**
* Internal dependencies
*/
import { ActivationApp } from './components/activation/activation-app';
domReady( () => {
const renderTarget = document.getElementById( 'js-googlesitekit-activation' );
if ( renderTarget ) {
loadTranslations();
sendAnalyticsTrackingEvent( 'plugin_setup', 'plugin_activated' );
render( <ActivationApp />, renderTarget );
/**
* Action triggered when the ActivationApp is loaded.
*/
doAction( 'googlesitekit.moduleLoaded', 'Activation' );
}
} );
| 2
| 1
| 2
|
mixed
|
--- a/assets/js/googlesitekit-activation.js
+++ b/assets/js/googlesitekit-activation.js
@@ -30,3 +30,3 @@
*/
-import { loadTranslations } from 'GoogleUtil';
+import { loadTranslations, sendAnalyticsTrackingEvent } from 'GoogleUtil';
import 'GoogleComponents/notifications';
@@ -43,2 +43,3 @@
loadTranslations();
+ sendAnalyticsTrackingEvent( 'plugin_setup', 'plugin_activated' );
|
--- a/assets/js/googlesitekit-activation.js
+++ b/assets/js/googlesitekit-activation.js
@@ ... @@
*/
-import { loadTranslations } from 'GoogleUtil';
+import { loadTranslations, sendAnalyticsTrackingEvent } from 'GoogleUtil';
import 'GoogleComponents/notifications';
@@ ... @@
loadTranslations();
+ sendAnalyticsTrackingEvent( 'plugin_setup', 'plugin_activated' );
|
--- a/assets/js/googlesitekit-activation.js
+++ b/assets/js/googlesitekit-activation.js
@@ -30,3 +30,3 @@
CON */
DEL import { loadTranslations } from 'GoogleUtil';
ADD import { loadTranslations, sendAnalyticsTrackingEvent } from 'GoogleUtil';
CON import 'GoogleComponents/notifications';
@@ -43,2 +43,3 @@
CON loadTranslations();
ADD sendAnalyticsTrackingEvent( 'plugin_setup', 'plugin_activated' );
CON
|
<<<<<<< SEARCH
* External dependencies
*/
import { loadTranslations } from 'GoogleUtil';
import 'GoogleComponents/notifications';
=======
* External dependencies
*/
import { loadTranslations, sendAnalyticsTrackingEvent } from 'GoogleUtil';
import 'GoogleComponents/notifications';
>>>>>>> REPLACE
<<<<<<< SEARCH
if ( renderTarget ) {
loadTranslations();
render( <ActivationApp />, renderTarget );
=======
if ( renderTarget ) {
loadTranslations();
sendAnalyticsTrackingEvent( 'plugin_setup', 'plugin_activated' );
render( <ActivationApp />, renderTarget );
>>>>>>> REPLACE
|
ksmithbaylor/emc-license-summarizer
|
afa811016080d53be1a5d92527aef3cfa728c55b
|
src/App/Header.js
|
javascript
|
cc0-1.0
|
Fix header alignment and silliness
|
import React from 'react';
import Paper from 'material-ui/lib/paper';
import { sideMargin, pageWidth } from 'data/layout';
export default () => (
<div>
<Paper rounded={false} zDepth={1} style={barStyle} />
<div style={centerContainerStyle}>
<Paper rounded={false} zDepth={2} style={logoStyle}>
<img src="logo.png" style={logoImageStyle} />
</Paper>
<span style={titleStyle}>
CAPTIVA License Decoder
</span>
</div>
</div>
);
const logoSide = 6;
const barHeight = 5;
const barStyle = {
backgroundColor: '#4e5052',
height: barHeight + 'rem',
width: '100%',
position: 'fixed',
left: 0,
top: 0,
zIndex: 1000
};
const centerContainerStyle = {
position: 'fixed',
top: 0,
left: sideMargin,
width: pageWidth,
margin: '0 auto',
fontSize: '2rem',
zIndex: 1001
};
const logoImageStyle = {
height: logoSide + 'rem',
width: logoSide + 'rem'
}
const logoStyle = {
backgroundColor: '#2c95dd',
position: 'fixed',
...logoImageStyle
};
const titleStyle = {
marginLeft: (logoSide + 1.5) + 'rem',
lineHeight: barHeight + 'rem',
color: 'white'
};
|
import React from 'react';
import Paper from 'material-ui/lib/paper';
import { sideMargin, pageWidth } from 'data/layout';
export default () => (
<Paper rounded={false} zDepth={1} style={barStyle}>
<div style={centerContainerStyle}>
<Paper rounded={false} zDepth={2} style={logoStyle}>
<img src="logo.png" style={logoImageStyle} />
</Paper>
<span style={titleStyle}>
CAPTIVA License Decoder
</span>
</div>
</Paper>
);
const barStyle = {
backgroundColor: '#4e5052',
height: '5rem',
width: '100%',
position: 'fixed',
left: 0,
top: 0,
zIndex: 1000
};
const centerContainerStyle = {
width: pageWidth,
margin: '0 auto',
zIndex: 1001
};
const logoStyle = {
backgroundColor: '#2c95dd',
display: 'inline-block',
fontSize: 0
};
const logoImageStyle = {
display: 'inline-block',
height: '6rem',
};
const titleStyle = {
display: 'inline-block',
verticalAlign: 'top',
marginLeft: '1.5rem',
fontSize: '2rem',
lineHeight: '5rem',
color: 'white'
};
| 15
| 20
| 6
|
mixed
|
--- a/src/App/Header.js
+++ b/src/App/Header.js
@@ -7,4 +7,3 @@
export default () => (
- <div>
- <Paper rounded={false} zDepth={1} style={barStyle} />
+ <Paper rounded={false} zDepth={1} style={barStyle}>
<div style={centerContainerStyle}>
@@ -17,7 +16,4 @@
</div>
- </div>
+ </Paper>
);
-
-const logoSide = 6;
-const barHeight = 5;
@@ -25,3 +21,3 @@
backgroundColor: '#4e5052',
- height: barHeight + 'rem',
+ height: '5rem',
width: '100%',
@@ -34,8 +30,4 @@
const centerContainerStyle = {
- position: 'fixed',
- top: 0,
- left: sideMargin,
width: pageWidth,
margin: '0 auto',
- fontSize: '2rem',
zIndex: 1001
@@ -43,11 +35,11 @@
-const logoImageStyle = {
- height: logoSide + 'rem',
- width: logoSide + 'rem'
-}
-
const logoStyle = {
backgroundColor: '#2c95dd',
- position: 'fixed',
- ...logoImageStyle
+ display: 'inline-block',
+ fontSize: 0
+};
+
+const logoImageStyle = {
+ display: 'inline-block',
+ height: '6rem',
};
@@ -55,4 +47,7 @@
const titleStyle = {
- marginLeft: (logoSide + 1.5) + 'rem',
- lineHeight: barHeight + 'rem',
+ display: 'inline-block',
+ verticalAlign: 'top',
+ marginLeft: '1.5rem',
+ fontSize: '2rem',
+ lineHeight: '5rem',
color: 'white'
|
--- a/src/App/Header.js
+++ b/src/App/Header.js
@@ ... @@
export default () => (
- <div>
- <Paper rounded={false} zDepth={1} style={barStyle} />
+ <Paper rounded={false} zDepth={1} style={barStyle}>
<div style={centerContainerStyle}>
@@ ... @@
</div>
- </div>
+ </Paper>
);
-
-const logoSide = 6;
-const barHeight = 5;
@@ ... @@
backgroundColor: '#4e5052',
- height: barHeight + 'rem',
+ height: '5rem',
width: '100%',
@@ ... @@
const centerContainerStyle = {
- position: 'fixed',
- top: 0,
- left: sideMargin,
width: pageWidth,
margin: '0 auto',
- fontSize: '2rem',
zIndex: 1001
@@ ... @@
-const logoImageStyle = {
- height: logoSide + 'rem',
- width: logoSide + 'rem'
-}
-
const logoStyle = {
backgroundColor: '#2c95dd',
- position: 'fixed',
- ...logoImageStyle
+ display: 'inline-block',
+ fontSize: 0
+};
+
+const logoImageStyle = {
+ display: 'inline-block',
+ height: '6rem',
};
@@ ... @@
const titleStyle = {
- marginLeft: (logoSide + 1.5) + 'rem',
- lineHeight: barHeight + 'rem',
+ display: 'inline-block',
+ verticalAlign: 'top',
+ marginLeft: '1.5rem',
+ fontSize: '2rem',
+ lineHeight: '5rem',
color: 'white'
|
--- a/src/App/Header.js
+++ b/src/App/Header.js
@@ -7,4 +7,3 @@
CON export default () => (
DEL <div>
DEL <Paper rounded={false} zDepth={1} style={barStyle} />
ADD <Paper rounded={false} zDepth={1} style={barStyle}>
CON <div style={centerContainerStyle}>
@@ -17,7 +16,4 @@
CON </div>
DEL </div>
ADD </Paper>
CON );
DEL
DEL const logoSide = 6;
DEL const barHeight = 5;
CON
@@ -25,3 +21,3 @@
CON backgroundColor: '#4e5052',
DEL height: barHeight + 'rem',
ADD height: '5rem',
CON width: '100%',
@@ -34,8 +30,4 @@
CON const centerContainerStyle = {
DEL position: 'fixed',
DEL top: 0,
DEL left: sideMargin,
CON width: pageWidth,
CON margin: '0 auto',
DEL fontSize: '2rem',
CON zIndex: 1001
@@ -43,11 +35,11 @@
CON
DEL const logoImageStyle = {
DEL height: logoSide + 'rem',
DEL width: logoSide + 'rem'
DEL }
DEL
CON const logoStyle = {
CON backgroundColor: '#2c95dd',
DEL position: 'fixed',
DEL ...logoImageStyle
ADD display: 'inline-block',
ADD fontSize: 0
ADD };
ADD
ADD const logoImageStyle = {
ADD display: 'inline-block',
ADD height: '6rem',
CON };
@@ -55,4 +47,7 @@
CON const titleStyle = {
DEL marginLeft: (logoSide + 1.5) + 'rem',
DEL lineHeight: barHeight + 'rem',
ADD display: 'inline-block',
ADD verticalAlign: 'top',
ADD marginLeft: '1.5rem',
ADD fontSize: '2rem',
ADD lineHeight: '5rem',
CON color: 'white'
|
<<<<<<< SEARCH
export default () => (
<div>
<Paper rounded={false} zDepth={1} style={barStyle} />
<div style={centerContainerStyle}>
<Paper rounded={false} zDepth={2} style={logoStyle}>
=======
export default () => (
<Paper rounded={false} zDepth={1} style={barStyle}>
<div style={centerContainerStyle}>
<Paper rounded={false} zDepth={2} style={logoStyle}>
>>>>>>> REPLACE
<<<<<<< SEARCH
</span>
</div>
</div>
);
const logoSide = 6;
const barHeight = 5;
const barStyle = {
backgroundColor: '#4e5052',
height: barHeight + 'rem',
width: '100%',
position: 'fixed',
=======
</span>
</div>
</Paper>
);
const barStyle = {
backgroundColor: '#4e5052',
height: '5rem',
width: '100%',
position: 'fixed',
>>>>>>> REPLACE
<<<<<<< SEARCH
const centerContainerStyle = {
position: 'fixed',
top: 0,
left: sideMargin,
width: pageWidth,
margin: '0 auto',
fontSize: '2rem',
zIndex: 1001
};
const logoImageStyle = {
height: logoSide + 'rem',
width: logoSide + 'rem'
}
const logoStyle = {
backgroundColor: '#2c95dd',
position: 'fixed',
...logoImageStyle
};
const titleStyle = {
marginLeft: (logoSide + 1.5) + 'rem',
lineHeight: barHeight + 'rem',
color: 'white'
};
=======
const centerContainerStyle = {
width: pageWidth,
margin: '0 auto',
zIndex: 1001
};
const logoStyle = {
backgroundColor: '#2c95dd',
display: 'inline-block',
fontSize: 0
};
const logoImageStyle = {
display: 'inline-block',
height: '6rem',
};
const titleStyle = {
display: 'inline-block',
verticalAlign: 'top',
marginLeft: '1.5rem',
fontSize: '2rem',
lineHeight: '5rem',
color: 'white'
};
>>>>>>> REPLACE
|
0xd4d/iced
|
015012dc3b909831e50e7793f261036f70e2ea6e
|
src/rust/iced-x86/src/iced_error.rs
|
rust
|
mit
|
Verify that IcedError is Send + Sync
|
// SPDX-License-Identifier: MIT
// Copyright (C) 2018-present iced project and contributors
use alloc::borrow::Cow;
use alloc::string::String;
use core::fmt;
#[cfg(feature = "std")]
use std::error::Error;
/// iced error
#[derive(Debug, Clone)]
pub struct IcedError {
error: Cow<'static, str>,
}
impl IcedError {
#[allow(dead_code)]
pub(crate) fn new(error: &'static str) -> Self {
Self { error: error.into() }
}
#[allow(dead_code)]
pub(crate) fn with_string(error: String) -> Self {
Self { error: error.into() }
}
}
#[cfg(feature = "std")]
impl Error for IcedError {
// Required since MSRV < 1.42.0
#[allow(clippy::missing_inline_in_public_items)]
fn description(&self) -> &str {
&self.error
}
}
impl fmt::Display for IcedError {
#[allow(clippy::missing_inline_in_public_items)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", &self.error)
}
}
|
// SPDX-License-Identifier: MIT
// Copyright (C) 2018-present iced project and contributors
use alloc::borrow::Cow;
use alloc::string::String;
use core::fmt;
#[cfg(feature = "std")]
use std::error::Error;
/// iced error
#[derive(Debug, Clone)]
pub struct IcedError {
error: Cow<'static, str>,
}
struct _TraitsCheck
where
IcedError: fmt::Debug + Clone + fmt::Display + Send + Sync;
#[cfg(feature = "std")]
struct _TraitsCheckStd
where
IcedError: Error;
impl IcedError {
#[allow(dead_code)]
pub(crate) fn new(error: &'static str) -> Self {
Self { error: error.into() }
}
#[allow(dead_code)]
pub(crate) fn with_string(error: String) -> Self {
Self { error: error.into() }
}
}
#[cfg(feature = "std")]
impl Error for IcedError {
// Required since MSRV < 1.42.0
#[allow(clippy::missing_inline_in_public_items)]
fn description(&self) -> &str {
&self.error
}
}
impl fmt::Display for IcedError {
#[allow(clippy::missing_inline_in_public_items)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", &self.error)
}
}
| 8
| 0
| 1
|
add_only
|
--- a/src/rust/iced-x86/src/iced_error.rs
+++ b/src/rust/iced-x86/src/iced_error.rs
@@ -14,2 +14,10 @@
}
+
+struct _TraitsCheck
+where
+ IcedError: fmt::Debug + Clone + fmt::Display + Send + Sync;
+#[cfg(feature = "std")]
+struct _TraitsCheckStd
+where
+ IcedError: Error;
|
--- a/src/rust/iced-x86/src/iced_error.rs
+++ b/src/rust/iced-x86/src/iced_error.rs
@@ ... @@
}
+
+struct _TraitsCheck
+where
+ IcedError: fmt::Debug + Clone + fmt::Display + Send + Sync;
+#[cfg(feature = "std")]
+struct _TraitsCheckStd
+where
+ IcedError: Error;
|
--- a/src/rust/iced-x86/src/iced_error.rs
+++ b/src/rust/iced-x86/src/iced_error.rs
@@ -14,2 +14,10 @@
CON }
ADD
ADD struct _TraitsCheck
ADD where
ADD IcedError: fmt::Debug + Clone + fmt::Display + Send + Sync;
ADD #[cfg(feature = "std")]
ADD struct _TraitsCheckStd
ADD where
ADD IcedError: Error;
CON
|
<<<<<<< SEARCH
error: Cow<'static, str>,
}
impl IcedError {
=======
error: Cow<'static, str>,
}
struct _TraitsCheck
where
IcedError: fmt::Debug + Clone + fmt::Display + Send + Sync;
#[cfg(feature = "std")]
struct _TraitsCheckStd
where
IcedError: Error;
impl IcedError {
>>>>>>> REPLACE
|
burnnat/grunt-sauce-driver
|
5961263d10fa445ee98bc364bd95a74b4a16550e
|
drivers/siesta.js
|
javascript
|
mit
|
Add some handling to auto-retry failed tests in Siesta.
This implementation will also click each failed test row to see the DOM
and failed assertions.
|
/**
* Selenium script for running Siesta unit tests.
*/
module.exports = function(browser, chain, options) {
var runButton = '.x-btn a[title="Run all"]';
chain
.waitForElementByCss(runButton, options.testReadyTimeout)
.elementByCss(runButton, function(err, el) {
browser.next('moveTo', el);
browser.next('clickElement', el);
})
.elementByCss('a.logo-link', function(err, el) {
browser.next('moveTo', el);
})
.waitForCondition('!!Siesta.my.activeHarness.endDate', options.testTimeout, options.testInterval)
.safeEval("Siesta.REPORTER ? Siesta.my.activeHarness.generateReport() : null", function(err, obj) {
if (obj) {
browser.saucePassed = obj.passed;
browser.sauceData = { siesta: obj };
}
});
};
|
/**
* Selenium script for running Siesta unit tests.
*/
module.exports = function(browser, chain, options) {
var button = function(title) {
return '.x-btn a[title="' + title + '"]';
};
var runAllButton = button('Run all');
var endCondition = '!!Siesta.my.activeHarness.endDate';
chain
.waitForElementByCss(runAllButton, options.testReadyTimeout)
.elementByCss(runAllButton, function(err, el) {
browser.next('clickElement', el);
browser.next('moveTo', el);
})
.elementByCss('a.logo-link', function(err, el) {
browser.next('moveTo', el);
})
.waitForCondition(endCondition, options.testTimeout, options.testInterval)
.elementsByCss('.tr-testgrid .x-grid-row .test-icon.icon-bug', function(err, els) {
// Note that browser.next() always unshifts to the start of the queue,
// so when adding multiple operations, the calls will be executed in the
// reverse order of what they appear here.
if (els.length > 0) {
browser.next('waitForCondition', endCondition, options.testTimeout, options.testInterval);
browser.next('elementByCss', button('Run failed'), function(err, el) {
browser.next('clickElement', el);
});
els.forEach(function(el) {
browser.next('clickElement', el);
browser.next('moveTo', el);
});
browser.next('execute', 'Siesta.my.activeHarness.endDate = null');
}
})
.safeEval("Siesta.REPORTER ? Siesta.my.activeHarness.generateReport() : null", function(err, obj) {
if (obj) {
browser.saucePassed = obj.passed;
browser.sauceData = { siesta: obj };
}
});
};
| 30
| 5
| 2
|
mixed
|
--- a/drivers/siesta.js
+++ b/drivers/siesta.js
@@ -4,9 +4,14 @@
module.exports = function(browser, chain, options) {
- var runButton = '.x-btn a[title="Run all"]';
+ var button = function(title) {
+ return '.x-btn a[title="' + title + '"]';
+ };
+
+ var runAllButton = button('Run all');
+ var endCondition = '!!Siesta.my.activeHarness.endDate';
chain
- .waitForElementByCss(runButton, options.testReadyTimeout)
- .elementByCss(runButton, function(err, el) {
+ .waitForElementByCss(runAllButton, options.testReadyTimeout)
+ .elementByCss(runAllButton, function(err, el) {
+ browser.next('clickElement', el);
browser.next('moveTo', el);
- browser.next('clickElement', el);
})
@@ -15,3 +20,23 @@
})
- .waitForCondition('!!Siesta.my.activeHarness.endDate', options.testTimeout, options.testInterval)
+ .waitForCondition(endCondition, options.testTimeout, options.testInterval)
+ .elementsByCss('.tr-testgrid .x-grid-row .test-icon.icon-bug', function(err, els) {
+ // Note that browser.next() always unshifts to the start of the queue,
+ // so when adding multiple operations, the calls will be executed in the
+ // reverse order of what they appear here.
+
+ if (els.length > 0) {
+ browser.next('waitForCondition', endCondition, options.testTimeout, options.testInterval);
+
+ browser.next('elementByCss', button('Run failed'), function(err, el) {
+ browser.next('clickElement', el);
+ });
+
+ els.forEach(function(el) {
+ browser.next('clickElement', el);
+ browser.next('moveTo', el);
+ });
+
+ browser.next('execute', 'Siesta.my.activeHarness.endDate = null');
+ }
+ })
.safeEval("Siesta.REPORTER ? Siesta.my.activeHarness.generateReport() : null", function(err, obj) {
|
--- a/drivers/siesta.js
+++ b/drivers/siesta.js
@@ ... @@
module.exports = function(browser, chain, options) {
- var runButton = '.x-btn a[title="Run all"]';
+ var button = function(title) {
+ return '.x-btn a[title="' + title + '"]';
+ };
+
+ var runAllButton = button('Run all');
+ var endCondition = '!!Siesta.my.activeHarness.endDate';
chain
- .waitForElementByCss(runButton, options.testReadyTimeout)
- .elementByCss(runButton, function(err, el) {
+ .waitForElementByCss(runAllButton, options.testReadyTimeout)
+ .elementByCss(runAllButton, function(err, el) {
+ browser.next('clickElement', el);
browser.next('moveTo', el);
- browser.next('clickElement', el);
})
@@ ... @@
})
- .waitForCondition('!!Siesta.my.activeHarness.endDate', options.testTimeout, options.testInterval)
+ .waitForCondition(endCondition, options.testTimeout, options.testInterval)
+ .elementsByCss('.tr-testgrid .x-grid-row .test-icon.icon-bug', function(err, els) {
+ // Note that browser.next() always unshifts to the start of the queue,
+ // so when adding multiple operations, the calls will be executed in the
+ // reverse order of what they appear here.
+
+ if (els.length > 0) {
+ browser.next('waitForCondition', endCondition, options.testTimeout, options.testInterval);
+
+ browser.next('elementByCss', button('Run failed'), function(err, el) {
+ browser.next('clickElement', el);
+ });
+
+ els.forEach(function(el) {
+ browser.next('clickElement', el);
+ browser.next('moveTo', el);
+ });
+
+ browser.next('execute', 'Siesta.my.activeHarness.endDate = null');
+ }
+ })
.safeEval("Siesta.REPORTER ? Siesta.my.activeHarness.generateReport() : null", function(err, obj) {
|
--- a/drivers/siesta.js
+++ b/drivers/siesta.js
@@ -4,9 +4,14 @@
CON module.exports = function(browser, chain, options) {
DEL var runButton = '.x-btn a[title="Run all"]';
ADD var button = function(title) {
ADD return '.x-btn a[title="' + title + '"]';
ADD };
ADD
ADD var runAllButton = button('Run all');
ADD var endCondition = '!!Siesta.my.activeHarness.endDate';
CON
CON chain
DEL .waitForElementByCss(runButton, options.testReadyTimeout)
DEL .elementByCss(runButton, function(err, el) {
ADD .waitForElementByCss(runAllButton, options.testReadyTimeout)
ADD .elementByCss(runAllButton, function(err, el) {
ADD browser.next('clickElement', el);
CON browser.next('moveTo', el);
DEL browser.next('clickElement', el);
CON })
@@ -15,3 +20,23 @@
CON })
DEL .waitForCondition('!!Siesta.my.activeHarness.endDate', options.testTimeout, options.testInterval)
ADD .waitForCondition(endCondition, options.testTimeout, options.testInterval)
ADD .elementsByCss('.tr-testgrid .x-grid-row .test-icon.icon-bug', function(err, els) {
ADD // Note that browser.next() always unshifts to the start of the queue,
ADD // so when adding multiple operations, the calls will be executed in the
ADD // reverse order of what they appear here.
ADD
ADD if (els.length > 0) {
ADD browser.next('waitForCondition', endCondition, options.testTimeout, options.testInterval);
ADD
ADD browser.next('elementByCss', button('Run failed'), function(err, el) {
ADD browser.next('clickElement', el);
ADD });
ADD
ADD els.forEach(function(el) {
ADD browser.next('clickElement', el);
ADD browser.next('moveTo', el);
ADD });
ADD
ADD browser.next('execute', 'Siesta.my.activeHarness.endDate = null');
ADD }
ADD })
CON .safeEval("Siesta.REPORTER ? Siesta.my.activeHarness.generateReport() : null", function(err, obj) {
|
<<<<<<< SEARCH
*/
module.exports = function(browser, chain, options) {
var runButton = '.x-btn a[title="Run all"]';
chain
.waitForElementByCss(runButton, options.testReadyTimeout)
.elementByCss(runButton, function(err, el) {
browser.next('moveTo', el);
browser.next('clickElement', el);
})
.elementByCss('a.logo-link', function(err, el) {
browser.next('moveTo', el);
})
.waitForCondition('!!Siesta.my.activeHarness.endDate', options.testTimeout, options.testInterval)
.safeEval("Siesta.REPORTER ? Siesta.my.activeHarness.generateReport() : null", function(err, obj) {
if (obj) {
=======
*/
module.exports = function(browser, chain, options) {
var button = function(title) {
return '.x-btn a[title="' + title + '"]';
};
var runAllButton = button('Run all');
var endCondition = '!!Siesta.my.activeHarness.endDate';
chain
.waitForElementByCss(runAllButton, options.testReadyTimeout)
.elementByCss(runAllButton, function(err, el) {
browser.next('clickElement', el);
browser.next('moveTo', el);
})
.elementByCss('a.logo-link', function(err, el) {
browser.next('moveTo', el);
})
.waitForCondition(endCondition, options.testTimeout, options.testInterval)
.elementsByCss('.tr-testgrid .x-grid-row .test-icon.icon-bug', function(err, els) {
// Note that browser.next() always unshifts to the start of the queue,
// so when adding multiple operations, the calls will be executed in the
// reverse order of what they appear here.
if (els.length > 0) {
browser.next('waitForCondition', endCondition, options.testTimeout, options.testInterval);
browser.next('elementByCss', button('Run failed'), function(err, el) {
browser.next('clickElement', el);
});
els.forEach(function(el) {
browser.next('clickElement', el);
browser.next('moveTo', el);
});
browser.next('execute', 'Siesta.my.activeHarness.endDate = null');
}
})
.safeEval("Siesta.REPORTER ? Siesta.my.activeHarness.generateReport() : null", function(err, obj) {
if (obj) {
>>>>>>> REPLACE
|
csperkins/crtp
|
6efacc72a63af7c8dd35c7e5dd77fe1a823c075d
|
src/main.rs
|
rust
|
bsd-2-clause
|
Add structs to represent RTCP packets (SR, RR, SDES, BYE)
|
fn main() {
println!("CRTP")
}
|
// ================================================================================================
type SSRC = u32;
type RtpTimestamp = u32;
type NtpTimestamp = u64;
#[deriving(Clone)]
struct SenderInfo {
ntp_ts : NtpTimestamp,
rtp_ts : RtpTimestamp,
pckt_count : u32,
byte_count : u32
}
#[deriving(Clone)]
struct ReportBlock {
ssrc : SSRC,
fract_lost : u8,
cumul_lost : u32,
ext_seq : u32,
jitter : u32,
lsr : u32,
dlsr : u32
}
#[deriving(Clone)]
struct SdesItem {
item_type : u8,
item_text : String
}
#[deriving(Clone)]
struct SdesChunk {
ssrc : SSRC,
items : Vec<SdesItem>
}
#[deriving(Clone)]
enum PacketRTCP {
PacketSR(SSRC, Vec<ReportBlock>, SenderInfo),
PacketRR(SSRC, Vec<ReportBlock>),
PacketSDES(Vec<SdesChunk>),
PacketBye(Vec<SSRC>, String),
}
#[deriving(Clone)]
enum Packet {
PacketRTP,
PacketCompoundRTCP(Vec<PacketRTCP>)
}
// ================================================================================================
fn main() {
println!("CRTP")
}
// ================================================================================================
// vim: set ts=2 sw=2 tw=0 et ai:
| 54
| 0
| 2
|
add_only
|
--- a/src/main.rs
+++ b/src/main.rs
@@ -1 +1,53 @@
+// ================================================================================================
+
+type SSRC = u32;
+type RtpTimestamp = u32;
+type NtpTimestamp = u64;
+
+#[deriving(Clone)]
+struct SenderInfo {
+ ntp_ts : NtpTimestamp,
+ rtp_ts : RtpTimestamp,
+ pckt_count : u32,
+ byte_count : u32
+}
+
+#[deriving(Clone)]
+struct ReportBlock {
+ ssrc : SSRC,
+ fract_lost : u8,
+ cumul_lost : u32,
+ ext_seq : u32,
+ jitter : u32,
+ lsr : u32,
+ dlsr : u32
+}
+
+#[deriving(Clone)]
+struct SdesItem {
+ item_type : u8,
+ item_text : String
+}
+
+#[deriving(Clone)]
+struct SdesChunk {
+ ssrc : SSRC,
+ items : Vec<SdesItem>
+}
+
+#[deriving(Clone)]
+enum PacketRTCP {
+ PacketSR(SSRC, Vec<ReportBlock>, SenderInfo),
+ PacketRR(SSRC, Vec<ReportBlock>),
+ PacketSDES(Vec<SdesChunk>),
+ PacketBye(Vec<SSRC>, String),
+}
+
+#[deriving(Clone)]
+enum Packet {
+ PacketRTP,
+ PacketCompoundRTCP(Vec<PacketRTCP>)
+}
+
+// ================================================================================================
@@ -5 +57,3 @@
+// ================================================================================================
+// vim: set ts=2 sw=2 tw=0 et ai:
|
--- a/src/main.rs
+++ b/src/main.rs
@@ ... @@
+// ================================================================================================
+
+type SSRC = u32;
+type RtpTimestamp = u32;
+type NtpTimestamp = u64;
+
+#[deriving(Clone)]
+struct SenderInfo {
+ ntp_ts : NtpTimestamp,
+ rtp_ts : RtpTimestamp,
+ pckt_count : u32,
+ byte_count : u32
+}
+
+#[deriving(Clone)]
+struct ReportBlock {
+ ssrc : SSRC,
+ fract_lost : u8,
+ cumul_lost : u32,
+ ext_seq : u32,
+ jitter : u32,
+ lsr : u32,
+ dlsr : u32
+}
+
+#[deriving(Clone)]
+struct SdesItem {
+ item_type : u8,
+ item_text : String
+}
+
+#[deriving(Clone)]
+struct SdesChunk {
+ ssrc : SSRC,
+ items : Vec<SdesItem>
+}
+
+#[deriving(Clone)]
+enum PacketRTCP {
+ PacketSR(SSRC, Vec<ReportBlock>, SenderInfo),
+ PacketRR(SSRC, Vec<ReportBlock>),
+ PacketSDES(Vec<SdesChunk>),
+ PacketBye(Vec<SSRC>, String),
+}
+
+#[deriving(Clone)]
+enum Packet {
+ PacketRTP,
+ PacketCompoundRTCP(Vec<PacketRTCP>)
+}
+
+// ================================================================================================
@@ ... @@
+// ================================================================================================
+// vim: set ts=2 sw=2 tw=0 et ai:
|
--- a/src/main.rs
+++ b/src/main.rs
@@ -1 +1,53 @@
ADD // ================================================================================================
ADD
ADD type SSRC = u32;
ADD type RtpTimestamp = u32;
ADD type NtpTimestamp = u64;
ADD
ADD #[deriving(Clone)]
ADD struct SenderInfo {
ADD ntp_ts : NtpTimestamp,
ADD rtp_ts : RtpTimestamp,
ADD pckt_count : u32,
ADD byte_count : u32
ADD }
ADD
ADD #[deriving(Clone)]
ADD struct ReportBlock {
ADD ssrc : SSRC,
ADD fract_lost : u8,
ADD cumul_lost : u32,
ADD ext_seq : u32,
ADD jitter : u32,
ADD lsr : u32,
ADD dlsr : u32
ADD }
ADD
ADD #[deriving(Clone)]
ADD struct SdesItem {
ADD item_type : u8,
ADD item_text : String
ADD }
ADD
ADD #[deriving(Clone)]
ADD struct SdesChunk {
ADD ssrc : SSRC,
ADD items : Vec<SdesItem>
ADD }
ADD
ADD #[deriving(Clone)]
ADD enum PacketRTCP {
ADD PacketSR(SSRC, Vec<ReportBlock>, SenderInfo),
ADD PacketRR(SSRC, Vec<ReportBlock>),
ADD PacketSDES(Vec<SdesChunk>),
ADD PacketBye(Vec<SSRC>, String),
ADD }
ADD
ADD #[deriving(Clone)]
ADD enum Packet {
ADD PacketRTP,
ADD PacketCompoundRTCP(Vec<PacketRTCP>)
ADD }
ADD
ADD // ================================================================================================
CON
@@ -5 +57,3 @@
CON
ADD // ================================================================================================
ADD // vim: set ts=2 sw=2 tw=0 et ai:
|
<<<<<<< SEARCH
fn main() {
println!("CRTP")
}
=======
// ================================================================================================
type SSRC = u32;
type RtpTimestamp = u32;
type NtpTimestamp = u64;
#[deriving(Clone)]
struct SenderInfo {
ntp_ts : NtpTimestamp,
rtp_ts : RtpTimestamp,
pckt_count : u32,
byte_count : u32
}
#[deriving(Clone)]
struct ReportBlock {
ssrc : SSRC,
fract_lost : u8,
cumul_lost : u32,
ext_seq : u32,
jitter : u32,
lsr : u32,
dlsr : u32
}
#[deriving(Clone)]
struct SdesItem {
item_type : u8,
item_text : String
}
#[deriving(Clone)]
struct SdesChunk {
ssrc : SSRC,
items : Vec<SdesItem>
}
#[deriving(Clone)]
enum PacketRTCP {
PacketSR(SSRC, Vec<ReportBlock>, SenderInfo),
PacketRR(SSRC, Vec<ReportBlock>),
PacketSDES(Vec<SdesChunk>),
PacketBye(Vec<SSRC>, String),
}
#[deriving(Clone)]
enum Packet {
PacketRTP,
PacketCompoundRTCP(Vec<PacketRTCP>)
}
// ================================================================================================
fn main() {
println!("CRTP")
}
// ================================================================================================
// vim: set ts=2 sw=2 tw=0 et ai:
>>>>>>> REPLACE
|
janis-kra/dropbox-fetch
|
7b00cb9900a4286b6dc4204660a1f16f59bae321
|
dropbox-fetch.js
|
javascript
|
mit
|
Return the authorization url when `authorize` fails
s.t. the user can manually authorize and set the token in `config.js`
|
/**
* The authorization token with which calls to the API are made.
* @type {String}
*/
let token = '';
/**
* Authorize via OAuth 2.0 for Dropbox API calls.
*
* @parameter {string} clientId your app's key
* @parameter {string} redirectUri the uri where the user should be redirected
* to, after authorization has completed
* @return {function} a promise that resolves or fails both with the returned
* HTTP status code
*/
const authorize = (clientId, redirectUri = '') => {
return new Promise((resolve, reject) => {
reject('not implemented yet');
});
};
/**
* Set the token that is used for all Dropbox API calls to the given value.
* @param {string} t The new token value.
*/
const setToken = (t) => {
if (typeof t !== 'string') {
throw new Error('invalid argument ' + t + ' (expected: string)');
}
token = t;
};
module.exports = {
authorize,
setToken
};
|
/**
* The authorization token with which calls to the API are made.
* @type {String}
*/
let token = '';
const AUTHORIZE_ENDPOINT = 'https://www.dropbox.com/oauth2/authorize';
const getAuthorizationUrl = (clientId) => {
return AUTHORIZE_ENDPOINT + '?' +
'response_type=token' +
'client_id=' + clientId;
};
/**
* Authorize via OAuth 2.0 for Dropbox API calls.
*
* @parameter {string} clientId your app's key
* @parameter {string} redirectUri the uri where the user should be redirected
* to, after authorization has completed
* @return {function} a promise that resolves or fails both with the returned
* HTTP status code
*/
const authorize = (clientId, redirectUri = '') => {
return new Promise((resolve, reject) => {
reject('Not implemented yet, please obtain a token manually by calling ' +
getAuthorizationUrl(clientId));
});
};
/**
* Set the token that is used for all Dropbox API calls to the given value.
* @param {string} t The new token value.
*/
const setToken = (t) => {
if (typeof t !== 'string') {
throw new Error('invalid argument ' + t + ' (expected: string)');
}
token = t;
};
module.exports = {
authorize,
setToken
};
| 10
| 1
| 2
|
mixed
|
--- a/dropbox-fetch.js
+++ b/dropbox-fetch.js
@@ -5,2 +5,10 @@
let token = '';
+
+const AUTHORIZE_ENDPOINT = 'https://www.dropbox.com/oauth2/authorize';
+
+const getAuthorizationUrl = (clientId) => {
+ return AUTHORIZE_ENDPOINT + '?' +
+ 'response_type=token' +
+ 'client_id=' + clientId;
+};
@@ -17,3 +25,4 @@
return new Promise((resolve, reject) => {
- reject('not implemented yet');
+ reject('Not implemented yet, please obtain a token manually by calling ' +
+ getAuthorizationUrl(clientId));
});
|
--- a/dropbox-fetch.js
+++ b/dropbox-fetch.js
@@ ... @@
let token = '';
+
+const AUTHORIZE_ENDPOINT = 'https://www.dropbox.com/oauth2/authorize';
+
+const getAuthorizationUrl = (clientId) => {
+ return AUTHORIZE_ENDPOINT + '?' +
+ 'response_type=token' +
+ 'client_id=' + clientId;
+};
@@ ... @@
return new Promise((resolve, reject) => {
- reject('not implemented yet');
+ reject('Not implemented yet, please obtain a token manually by calling ' +
+ getAuthorizationUrl(clientId));
});
|
--- a/dropbox-fetch.js
+++ b/dropbox-fetch.js
@@ -5,2 +5,10 @@
CON let token = '';
ADD
ADD const AUTHORIZE_ENDPOINT = 'https://www.dropbox.com/oauth2/authorize';
ADD
ADD const getAuthorizationUrl = (clientId) => {
ADD return AUTHORIZE_ENDPOINT + '?' +
ADD 'response_type=token' +
ADD 'client_id=' + clientId;
ADD };
CON
@@ -17,3 +25,4 @@
CON return new Promise((resolve, reject) => {
DEL reject('not implemented yet');
ADD reject('Not implemented yet, please obtain a token manually by calling ' +
ADD getAuthorizationUrl(clientId));
CON });
|
<<<<<<< SEARCH
*/
let token = '';
/**
=======
*/
let token = '';
const AUTHORIZE_ENDPOINT = 'https://www.dropbox.com/oauth2/authorize';
const getAuthorizationUrl = (clientId) => {
return AUTHORIZE_ENDPOINT + '?' +
'response_type=token' +
'client_id=' + clientId;
};
/**
>>>>>>> REPLACE
<<<<<<< SEARCH
const authorize = (clientId, redirectUri = '') => {
return new Promise((resolve, reject) => {
reject('not implemented yet');
});
};
=======
const authorize = (clientId, redirectUri = '') => {
return new Promise((resolve, reject) => {
reject('Not implemented yet, please obtain a token manually by calling ' +
getAuthorizationUrl(clientId));
});
};
>>>>>>> REPLACE
|
kryptnostic/rhizome
|
61c53b5c19cbe54179b018f885622a2878718251
|
src/main/java/digital/loom/rhizome/authentication/CookieReadingAuth0AuthenticationFilter.java
|
java
|
apache-2.0
|
Fix NPE if no auth info provided
|
package digital.loom.rhizome.authentication;
import java.util.regex.Pattern;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import com.auth0.jwt.internal.org.apache.commons.lang3.StringUtils;
import com.auth0.spring.security.api.Auth0AuthenticationFilter;
import com.google.common.base.MoreObjects;
public class CookieReadingAuth0AuthenticationFilter extends Auth0AuthenticationFilter {
@Override
protected String getToken( HttpServletRequest httpRequest ) {
String authorizationCookie = null;
Cookie[] cookies = httpRequest.getCookies();
if ( cookies != null ) {
for ( Cookie cookie : httpRequest.getCookies() ) {
if ( StringUtils.equals( cookie.getName(), "authorization" ) ) {
authorizationCookie = cookie.getValue();
break;
}
}
}
final String authorizationHeader = httpRequest.getHeader( "authorization" );
final String[] parts = MoreObjects.firstNonNull( authorizationHeader, authorizationCookie ).split( " " );
if ( parts.length != 2 ) {
// "Unauthorized: Format is Authorization: Bearer [token]"
return null;
}
final String scheme = parts[ 0 ];
final String credentials = parts[ 1 ];
final Pattern pattern = Pattern.compile( "^Bearer$", Pattern.CASE_INSENSITIVE );
return pattern.matcher( scheme ).matches() ? credentials : null;
}
}
|
package digital.loom.rhizome.authentication;
import java.util.regex.Pattern;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import com.auth0.jwt.internal.org.apache.commons.lang3.StringUtils;
import com.auth0.spring.security.api.Auth0AuthenticationFilter;
import com.google.common.base.MoreObjects;
public class CookieReadingAuth0AuthenticationFilter extends Auth0AuthenticationFilter {
@Override
protected String getToken( HttpServletRequest httpRequest ) {
String authorizationCookie = null;
Cookie[] cookies = httpRequest.getCookies();
if ( cookies != null ) {
for ( Cookie cookie : httpRequest.getCookies() ) {
if ( StringUtils.equals( cookie.getName(), "authorization" ) ) {
authorizationCookie = cookie.getValue();
break;
}
}
}
final String authorizationInfo = MoreObjects.firstNonNull( httpRequest.getHeader( "authorization" ), authorizationCookie );
if( authorizationInfo == null ) {
return null;
}
final String[] parts = authorizationInfo.split( " " );
if ( parts.length != 2 ) {
// "Unauthorized: Format is Authorization: Bearer [token]"
return null;
}
final String scheme = parts[ 0 ];
final String credentials = parts[ 1 ];
final Pattern pattern = Pattern.compile( "^Bearer$", Pattern.CASE_INSENSITIVE );
return pattern.matcher( scheme ).matches() ? credentials : null;
}
}
| 6
| 3
| 1
|
mixed
|
--- a/src/main/java/digital/loom/rhizome/authentication/CookieReadingAuth0AuthenticationFilter.java
+++ b/src/main/java/digital/loom/rhizome/authentication/CookieReadingAuth0AuthenticationFilter.java
@@ -25,5 +25,8 @@
- final String authorizationHeader = httpRequest.getHeader( "authorization" );
-
- final String[] parts = MoreObjects.firstNonNull( authorizationHeader, authorizationCookie ).split( " " );
+ final String authorizationInfo = MoreObjects.firstNonNull( httpRequest.getHeader( "authorization" ), authorizationCookie );
+ if( authorizationInfo == null ) {
+ return null;
+ }
+ final String[] parts = authorizationInfo.split( " " );
+
if ( parts.length != 2 ) {
|
--- a/src/main/java/digital/loom/rhizome/authentication/CookieReadingAuth0AuthenticationFilter.java
+++ b/src/main/java/digital/loom/rhizome/authentication/CookieReadingAuth0AuthenticationFilter.java
@@ ... @@
- final String authorizationHeader = httpRequest.getHeader( "authorization" );
-
- final String[] parts = MoreObjects.firstNonNull( authorizationHeader, authorizationCookie ).split( " " );
+ final String authorizationInfo = MoreObjects.firstNonNull( httpRequest.getHeader( "authorization" ), authorizationCookie );
+ if( authorizationInfo == null ) {
+ return null;
+ }
+ final String[] parts = authorizationInfo.split( " " );
+
if ( parts.length != 2 ) {
|
--- a/src/main/java/digital/loom/rhizome/authentication/CookieReadingAuth0AuthenticationFilter.java
+++ b/src/main/java/digital/loom/rhizome/authentication/CookieReadingAuth0AuthenticationFilter.java
@@ -25,5 +25,8 @@
CON
DEL final String authorizationHeader = httpRequest.getHeader( "authorization" );
DEL
DEL final String[] parts = MoreObjects.firstNonNull( authorizationHeader, authorizationCookie ).split( " " );
ADD final String authorizationInfo = MoreObjects.firstNonNull( httpRequest.getHeader( "authorization" ), authorizationCookie );
ADD if( authorizationInfo == null ) {
ADD return null;
ADD }
ADD final String[] parts = authorizationInfo.split( " " );
ADD
CON if ( parts.length != 2 ) {
|
<<<<<<< SEARCH
}
final String authorizationHeader = httpRequest.getHeader( "authorization" );
final String[] parts = MoreObjects.firstNonNull( authorizationHeader, authorizationCookie ).split( " " );
if ( parts.length != 2 ) {
// "Unauthorized: Format is Authorization: Bearer [token]"
=======
}
final String authorizationInfo = MoreObjects.firstNonNull( httpRequest.getHeader( "authorization" ), authorizationCookie );
if( authorizationInfo == null ) {
return null;
}
final String[] parts = authorizationInfo.split( " " );
if ( parts.length != 2 ) {
// "Unauthorized: Format is Authorization: Bearer [token]"
>>>>>>> REPLACE
|
premkumarbalu/scsb-etl
|
baed73a969b8f255093b71c94d4c4a4e0af426c7
|
src/main/java/org/recap/route/XMLFileLoadValidator.java
|
java
|
apache-2.0
|
Check if the xml file has been loaded already; if so, don't load it again.
|
package org.recap.route;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.impl.DefaultMessage;
import org.recap.model.jpa.ReportEntity;
import org.recap.repository.ReportDetailRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.List;
/**
* Created by peris on 8/20/16.
*/
@Component
public class XMLFileLoadValidator implements Processor {
@Autowired
ReportDetailRepository reportDetailRepository;
@Override
public void process(Exchange exchange) throws Exception {
String camelFileName = (String) exchange.getIn().getHeader("CamelFileName");
List<ReportEntity> reportEntity =
reportDetailRepository.findByFileName(camelFileName);
if(!CollectionUtils.isEmpty(reportEntity)){
DefaultMessage defaultMessage = new DefaultMessage();
defaultMessage.setBody("");
exchange.setIn(defaultMessage);
exchange.setOut(defaultMessage);
}
}
}
|
package org.recap.route;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.impl.DefaultMessage;
import org.recap.model.jpa.ReportEntity;
import org.recap.repository.ReportDetailRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.List;
/**
* Created by peris on 8/20/16.
*/
@Component
public class XMLFileLoadValidator implements Processor {
@Autowired
ReportDetailRepository reportDetailRepository;
/**
* Check to see if the xml file has been loaded already. If so, set empty body such that the file doesn't get
* processed again.
* @param exchange
* @throws Exception
*/
@Override
public void process(Exchange exchange) throws Exception {
String camelFileName = (String) exchange.getIn().getHeader("CamelFileName");
List<ReportEntity> reportEntity =
reportDetailRepository.findByFileName(camelFileName);
if(!CollectionUtils.isEmpty(reportEntity)){
DefaultMessage defaultMessage = new DefaultMessage();
defaultMessage.setBody("");
exchange.setIn(defaultMessage);
exchange.setOut(defaultMessage);
}
}
}
| 6
| 0
| 1
|
add_only
|
--- a/src/main/java/org/recap/route/XMLFileLoadValidator.java
+++ b/src/main/java/org/recap/route/XMLFileLoadValidator.java
@@ -22,2 +22,8 @@
+ /**
+ * Check to see if the xml file has been loaded already. If so, set empty body such that the file doesn't get
+ * processed again.
+ * @param exchange
+ * @throws Exception
+ */
@Override
|
--- a/src/main/java/org/recap/route/XMLFileLoadValidator.java
+++ b/src/main/java/org/recap/route/XMLFileLoadValidator.java
@@ ... @@
+ /**
+ * Check to see if the xml file has been loaded already. If so, set empty body such that the file doesn't get
+ * processed again.
+ * @param exchange
+ * @throws Exception
+ */
@Override
|
--- a/src/main/java/org/recap/route/XMLFileLoadValidator.java
+++ b/src/main/java/org/recap/route/XMLFileLoadValidator.java
@@ -22,2 +22,8 @@
CON
ADD /**
ADD * Check to see if the xml file has been loaded already. If so, set empty body such that the file doesn't get
ADD * processed again.
ADD * @param exchange
ADD * @throws Exception
ADD */
CON @Override
|
<<<<<<< SEARCH
ReportDetailRepository reportDetailRepository;
@Override
public void process(Exchange exchange) throws Exception {
=======
ReportDetailRepository reportDetailRepository;
/**
* Check to see if the xml file has been loaded already. If so, set empty body such that the file doesn't get
* processed again.
* @param exchange
* @throws Exception
*/
@Override
public void process(Exchange exchange) throws Exception {
>>>>>>> REPLACE
|
graydon/rust
|
1b681d6652bacce6b741ca66725f25b9afb16bc8
|
src/test/ui/if-attrs/cfg-false-if-attr.rs
|
rust
|
apache-2.0
|
Test that cfg-gated if-exprs are not type-checked
|
// check-pass
#[cfg(FALSE)]
fn simple_attr() {
#[attr] if true {}
#[allow_warnings] if true {}
}
#[cfg(FALSE)]
fn if_else_chain() {
#[first_attr] if true {
} else if false {
} else {
}
}
#[cfg(FALSE)]
fn if_let() {
#[attr] if let Some(_) = Some(true) {}
}
macro_rules! custom_macro {
($expr:expr) => {}
}
custom_macro! {
#[attr] if true {}
}
fn main() {}
|
// check-pass
#[cfg(FALSE)]
fn simple_attr() {
#[attr] if true {}
#[allow_warnings] if true {}
}
#[cfg(FALSE)]
fn if_else_chain() {
#[first_attr] if true {
} else if false {
} else {
}
}
#[cfg(FALSE)]
fn if_let() {
#[attr] if let Some(_) = Some(true) {}
}
fn bar() {
#[cfg(FALSE)]
if true {
let x: () = true; // Should not error due to the #[cfg(FALSE)]
}
#[cfg_attr(not(unset_attr), cfg(FALSE))]
if true {
let a: () = true; // Should not error due to the applied #[cfg(FALSE)]
}
}
macro_rules! custom_macro {
($expr:expr) => {}
}
custom_macro! {
#[attr] if true {}
}
fn main() {}
| 12
| 0
| 1
|
add_only
|
--- a/src/test/ui/if-attrs/cfg-false-if-attr.rs
+++ b/src/test/ui/if-attrs/cfg-false-if-attr.rs
@@ -21,2 +21,14 @@
+fn bar() {
+ #[cfg(FALSE)]
+ if true {
+ let x: () = true; // Should not error due to the #[cfg(FALSE)]
+ }
+
+ #[cfg_attr(not(unset_attr), cfg(FALSE))]
+ if true {
+ let a: () = true; // Should not error due to the applied #[cfg(FALSE)]
+ }
+}
+
macro_rules! custom_macro {
|
--- a/src/test/ui/if-attrs/cfg-false-if-attr.rs
+++ b/src/test/ui/if-attrs/cfg-false-if-attr.rs
@@ ... @@
+fn bar() {
+ #[cfg(FALSE)]
+ if true {
+ let x: () = true; // Should not error due to the #[cfg(FALSE)]
+ }
+
+ #[cfg_attr(not(unset_attr), cfg(FALSE))]
+ if true {
+ let a: () = true; // Should not error due to the applied #[cfg(FALSE)]
+ }
+}
+
macro_rules! custom_macro {
|
--- a/src/test/ui/if-attrs/cfg-false-if-attr.rs
+++ b/src/test/ui/if-attrs/cfg-false-if-attr.rs
@@ -21,2 +21,14 @@
CON
ADD fn bar() {
ADD #[cfg(FALSE)]
ADD if true {
ADD let x: () = true; // Should not error due to the #[cfg(FALSE)]
ADD }
ADD
ADD #[cfg_attr(not(unset_attr), cfg(FALSE))]
ADD if true {
ADD let a: () = true; // Should not error due to the applied #[cfg(FALSE)]
ADD }
ADD }
ADD
CON macro_rules! custom_macro {
|
<<<<<<< SEARCH
}
macro_rules! custom_macro {
($expr:expr) => {}
=======
}
fn bar() {
#[cfg(FALSE)]
if true {
let x: () = true; // Should not error due to the #[cfg(FALSE)]
}
#[cfg_attr(not(unset_attr), cfg(FALSE))]
if true {
let a: () = true; // Should not error due to the applied #[cfg(FALSE)]
}
}
macro_rules! custom_macro {
($expr:expr) => {}
>>>>>>> REPLACE
|
AcornUI/Acorn
|
82b02fa0f86aac1dea7b91971af341f7a5111bbf
|
acornui-spine/build.gradle.kts
|
kotlin
|
apache-2.0
|
Refactor acornui-spine to use 'basic' plugin
|
/*
* Copyright 2019 PolyForest
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
plugins {
kotlin("multiplatform")
`maven-publish`
}
val KOTLIN_LANGUAGE_VERSION: String by extra
val KOTLIN_JVM_TARGET: String by extra
kotlin {
js {
compilations.all {
kotlinOptions {
moduleKind = "amd"
sourceMap = true
sourceMapEmbedSources = "always"
main = "noCall"
}
}
}
jvm {
compilations.all {
kotlinOptions {
jvmTarget = KOTLIN_JVM_TARGET
}
}
}
targets.all {
compilations.all {
kotlinOptions {
languageVersion = KOTLIN_LANGUAGE_VERSION
apiVersion = KOTLIN_LANGUAGE_VERSION
verbose = true
}
}
}
sourceSets {
commonMain {
dependencies {
implementation(kotlin("stdlib-common"))
implementation(project(":acornui-core"))
implementation(project(":acornui-utils"))
}
}
commonTest {
dependencies {
implementation(kotlin("test-common"))
implementation(kotlin("test-annotations-common"))
}
}
named("jvmMain") {
dependencies {
implementation(kotlin("stdlib-jdk8"))
}
}
named("jvmTest") {
dependencies {
implementation(kotlin("test"))
implementation(kotlin("test-junit"))
}
}
named("jsMain") {
dependencies {
implementation(kotlin("stdlib-js"))
}
}
named("jsTest") {
dependencies {
implementation(kotlin("test-js"))
}
}
}
}
|
/*
* Copyright 2019 PolyForest
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
plugins {
id("com.polyforest.acornui.basic")
`maven-publish`
}
kotlin {
sourceSets {
commonMain {
dependencies {
implementation(project(":acornui-core"))
implementation(project(":acornui-utils"))
}
}
}
}
| 1
| 57
| 4
|
mixed
|
--- a/acornui-spine/build.gradle.kts
+++ b/acornui-spine/build.gradle.kts
@@ -17,3 +17,3 @@
plugins {
- kotlin("multiplatform")
+ id("com.polyforest.acornui.basic")
`maven-publish`
@@ -21,31 +21,3 @@
-val KOTLIN_LANGUAGE_VERSION: String by extra
-val KOTLIN_JVM_TARGET: String by extra
kotlin {
- js {
- compilations.all {
- kotlinOptions {
- moduleKind = "amd"
- sourceMap = true
- sourceMapEmbedSources = "always"
- main = "noCall"
- }
- }
- }
- jvm {
- compilations.all {
- kotlinOptions {
- jvmTarget = KOTLIN_JVM_TARGET
- }
- }
- }
- targets.all {
- compilations.all {
- kotlinOptions {
- languageVersion = KOTLIN_LANGUAGE_VERSION
- apiVersion = KOTLIN_LANGUAGE_VERSION
- verbose = true
- }
- }
- }
sourceSets {
@@ -53,3 +25,2 @@
dependencies {
- implementation(kotlin("stdlib-common"))
implementation(project(":acornui-core"))
@@ -58,29 +29,2 @@
}
- commonTest {
- dependencies {
- implementation(kotlin("test-common"))
- implementation(kotlin("test-annotations-common"))
- }
- }
- named("jvmMain") {
- dependencies {
- implementation(kotlin("stdlib-jdk8"))
- }
- }
- named("jvmTest") {
- dependencies {
- implementation(kotlin("test"))
- implementation(kotlin("test-junit"))
- }
- }
- named("jsMain") {
- dependencies {
- implementation(kotlin("stdlib-js"))
- }
- }
- named("jsTest") {
- dependencies {
- implementation(kotlin("test-js"))
- }
- }
}
|
--- a/acornui-spine/build.gradle.kts
+++ b/acornui-spine/build.gradle.kts
@@ ... @@
plugins {
- kotlin("multiplatform")
+ id("com.polyforest.acornui.basic")
`maven-publish`
@@ ... @@
-val KOTLIN_LANGUAGE_VERSION: String by extra
-val KOTLIN_JVM_TARGET: String by extra
kotlin {
- js {
- compilations.all {
- kotlinOptions {
- moduleKind = "amd"
- sourceMap = true
- sourceMapEmbedSources = "always"
- main = "noCall"
- }
- }
- }
- jvm {
- compilations.all {
- kotlinOptions {
- jvmTarget = KOTLIN_JVM_TARGET
- }
- }
- }
- targets.all {
- compilations.all {
- kotlinOptions {
- languageVersion = KOTLIN_LANGUAGE_VERSION
- apiVersion = KOTLIN_LANGUAGE_VERSION
- verbose = true
- }
- }
- }
sourceSets {
@@ ... @@
dependencies {
- implementation(kotlin("stdlib-common"))
implementation(project(":acornui-core"))
@@ ... @@
}
- commonTest {
- dependencies {
- implementation(kotlin("test-common"))
- implementation(kotlin("test-annotations-common"))
- }
- }
- named("jvmMain") {
- dependencies {
- implementation(kotlin("stdlib-jdk8"))
- }
- }
- named("jvmTest") {
- dependencies {
- implementation(kotlin("test"))
- implementation(kotlin("test-junit"))
- }
- }
- named("jsMain") {
- dependencies {
- implementation(kotlin("stdlib-js"))
- }
- }
- named("jsTest") {
- dependencies {
- implementation(kotlin("test-js"))
- }
- }
}
|
--- a/acornui-spine/build.gradle.kts
+++ b/acornui-spine/build.gradle.kts
@@ -17,3 +17,3 @@
CON plugins {
DEL kotlin("multiplatform")
ADD id("com.polyforest.acornui.basic")
CON `maven-publish`
@@ -21,31 +21,3 @@
CON
DEL val KOTLIN_LANGUAGE_VERSION: String by extra
DEL val KOTLIN_JVM_TARGET: String by extra
CON kotlin {
DEL js {
DEL compilations.all {
DEL kotlinOptions {
DEL moduleKind = "amd"
DEL sourceMap = true
DEL sourceMapEmbedSources = "always"
DEL main = "noCall"
DEL }
DEL }
DEL }
DEL jvm {
DEL compilations.all {
DEL kotlinOptions {
DEL jvmTarget = KOTLIN_JVM_TARGET
DEL }
DEL }
DEL }
DEL targets.all {
DEL compilations.all {
DEL kotlinOptions {
DEL languageVersion = KOTLIN_LANGUAGE_VERSION
DEL apiVersion = KOTLIN_LANGUAGE_VERSION
DEL verbose = true
DEL }
DEL }
DEL }
CON sourceSets {
@@ -53,3 +25,2 @@
CON dependencies {
DEL implementation(kotlin("stdlib-common"))
CON implementation(project(":acornui-core"))
@@ -58,29 +29,2 @@
CON }
DEL commonTest {
DEL dependencies {
DEL implementation(kotlin("test-common"))
DEL implementation(kotlin("test-annotations-common"))
DEL }
DEL }
DEL named("jvmMain") {
DEL dependencies {
DEL implementation(kotlin("stdlib-jdk8"))
DEL }
DEL }
DEL named("jvmTest") {
DEL dependencies {
DEL implementation(kotlin("test"))
DEL implementation(kotlin("test-junit"))
DEL }
DEL }
DEL named("jsMain") {
DEL dependencies {
DEL implementation(kotlin("stdlib-js"))
DEL }
DEL }
DEL named("jsTest") {
DEL dependencies {
DEL implementation(kotlin("test-js"))
DEL }
DEL }
CON }
|
<<<<<<< SEARCH
plugins {
kotlin("multiplatform")
`maven-publish`
}
val KOTLIN_LANGUAGE_VERSION: String by extra
val KOTLIN_JVM_TARGET: String by extra
kotlin {
js {
compilations.all {
kotlinOptions {
moduleKind = "amd"
sourceMap = true
sourceMapEmbedSources = "always"
main = "noCall"
}
}
}
jvm {
compilations.all {
kotlinOptions {
jvmTarget = KOTLIN_JVM_TARGET
}
}
}
targets.all {
compilations.all {
kotlinOptions {
languageVersion = KOTLIN_LANGUAGE_VERSION
apiVersion = KOTLIN_LANGUAGE_VERSION
verbose = true
}
}
}
sourceSets {
commonMain {
dependencies {
implementation(kotlin("stdlib-common"))
implementation(project(":acornui-core"))
implementation(project(":acornui-utils"))
}
}
commonTest {
dependencies {
implementation(kotlin("test-common"))
implementation(kotlin("test-annotations-common"))
}
}
named("jvmMain") {
dependencies {
implementation(kotlin("stdlib-jdk8"))
}
}
named("jvmTest") {
dependencies {
implementation(kotlin("test"))
implementation(kotlin("test-junit"))
}
}
named("jsMain") {
dependencies {
implementation(kotlin("stdlib-js"))
}
}
named("jsTest") {
dependencies {
implementation(kotlin("test-js"))
}
}
}
}
=======
plugins {
id("com.polyforest.acornui.basic")
`maven-publish`
}
kotlin {
sourceSets {
commonMain {
dependencies {
implementation(project(":acornui-core"))
implementation(project(":acornui-utils"))
}
}
}
}
>>>>>>> REPLACE
|
lise-henry/crowbook
|
9758f93d932ef8583c625eeca5563b03f69143b5
|
src/lib/misc.rs
|
rust
|
lgpl-2.1
|
Remove current directory from beginning of displayed paths
|
// Copyright (C) 2016 Élisabeth HENRY.
//
// This file is part of Crowbook.
//
// Crowbook is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation, either version 2.1 of the License, or
// (at your option) any later version.
//
// Crowbook is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Crowbook. If not, see <http://www.gnu.org/licenses/>.
//! Misc utility functions used across crowbook
use std;
use std::path::Path;
/// Try to canonicalize a path using std::fs::canonicalize, and returns the
/// unmodified path if it fails (e.g. if the path doesn't exist (yet))
pub fn canonicalize<P: AsRef<Path>>(path: P) -> String {
if let Ok(path) = std::fs::canonicalize(path.as_ref()) {
format!("{}", path.display())
} else {
format!("{}", path.as_ref().display())
}
}
|
// Copyright (C) 2016 Élisabeth HENRY.
//
// This file is part of Crowbook.
//
// Crowbook is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation, either version 2.1 of the License, or
// (at your option) any later version.
//
// Crowbook is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Crowbook. If not, see <http://www.gnu.org/licenses/>.
//! Misc utility functions used across crowbook
use std;
use std::path::Path;
use std::io::Result;
/// Try to canonicalize a path using std::fs::canonicalize, and returns the
/// unmodified path if it fails (e.g. if the path doesn't exist (yet))
pub fn canonicalize<P: AsRef<Path>>(path: P) -> String {
try_canonicalize(path.as_ref())
.unwrap_or(format!("{}", path.as_ref().display()))
}
fn try_canonicalize<P: AsRef<Path>>(path: P) -> Result<String> {
let path = std::fs::canonicalize(path.as_ref())?;
let cwd = std::env::current_dir()?;
Ok(if let Ok(path) = path.strip_prefix(&cwd) {
format!("{}", path.display())
} else {
format!("{}", path.display())
})
}
| 12
| 3
| 2
|
mixed
|
--- a/src/lib/misc.rs
+++ b/src/lib/misc.rs
@@ -21,2 +21,3 @@
use std::path::Path;
+use std::io::Result;
@@ -25,7 +26,15 @@
pub fn canonicalize<P: AsRef<Path>>(path: P) -> String {
- if let Ok(path) = std::fs::canonicalize(path.as_ref()) {
+ try_canonicalize(path.as_ref())
+ .unwrap_or(format!("{}", path.as_ref().display()))
+}
+
+
+fn try_canonicalize<P: AsRef<Path>>(path: P) -> Result<String> {
+ let path = std::fs::canonicalize(path.as_ref())?;
+ let cwd = std::env::current_dir()?;
+ Ok(if let Ok(path) = path.strip_prefix(&cwd) {
format!("{}", path.display())
} else {
- format!("{}", path.as_ref().display())
- }
+ format!("{}", path.display())
+ })
}
|
--- a/src/lib/misc.rs
+++ b/src/lib/misc.rs
@@ ... @@
use std::path::Path;
+use std::io::Result;
@@ ... @@
pub fn canonicalize<P: AsRef<Path>>(path: P) -> String {
- if let Ok(path) = std::fs::canonicalize(path.as_ref()) {
+ try_canonicalize(path.as_ref())
+ .unwrap_or(format!("{}", path.as_ref().display()))
+}
+
+
+fn try_canonicalize<P: AsRef<Path>>(path: P) -> Result<String> {
+ let path = std::fs::canonicalize(path.as_ref())?;
+ let cwd = std::env::current_dir()?;
+ Ok(if let Ok(path) = path.strip_prefix(&cwd) {
format!("{}", path.display())
} else {
- format!("{}", path.as_ref().display())
- }
+ format!("{}", path.display())
+ })
}
|
--- a/src/lib/misc.rs
+++ b/src/lib/misc.rs
@@ -21,2 +21,3 @@
CON use std::path::Path;
ADD use std::io::Result;
CON
@@ -25,7 +26,15 @@
CON pub fn canonicalize<P: AsRef<Path>>(path: P) -> String {
DEL if let Ok(path) = std::fs::canonicalize(path.as_ref()) {
ADD try_canonicalize(path.as_ref())
ADD .unwrap_or(format!("{}", path.as_ref().display()))
ADD }
ADD
ADD
ADD fn try_canonicalize<P: AsRef<Path>>(path: P) -> Result<String> {
ADD let path = std::fs::canonicalize(path.as_ref())?;
ADD let cwd = std::env::current_dir()?;
ADD Ok(if let Ok(path) = path.strip_prefix(&cwd) {
CON format!("{}", path.display())
CON } else {
DEL format!("{}", path.as_ref().display())
DEL }
ADD format!("{}", path.display())
ADD })
CON }
|
<<<<<<< SEARCH
use std;
use std::path::Path;
/// Try to canonicalize a path using std::fs::canonicalize, and returns the
/// unmodified path if it fails (e.g. if the path doesn't exist (yet))
pub fn canonicalize<P: AsRef<Path>>(path: P) -> String {
if let Ok(path) = std::fs::canonicalize(path.as_ref()) {
format!("{}", path.display())
} else {
format!("{}", path.as_ref().display())
}
}
=======
use std;
use std::path::Path;
use std::io::Result;
/// Try to canonicalize a path using std::fs::canonicalize, and returns the
/// unmodified path if it fails (e.g. if the path doesn't exist (yet))
pub fn canonicalize<P: AsRef<Path>>(path: P) -> String {
try_canonicalize(path.as_ref())
.unwrap_or(format!("{}", path.as_ref().display()))
}
fn try_canonicalize<P: AsRef<Path>>(path: P) -> Result<String> {
let path = std::fs::canonicalize(path.as_ref())?;
let cwd = std::env::current_dir()?;
Ok(if let Ok(path) = path.strip_prefix(&cwd) {
format!("{}", path.display())
} else {
format!("{}", path.display())
})
}
>>>>>>> REPLACE
|
jean79/yested_fw
|
a875db9ef0304d20c80231d94df39af68a9c3c61
|
src/test/kotlin/net/yested/ext/bootstrap3/InputTest.kt
|
kotlin
|
mit
|
Add test to try and diagnose why format string isn't being honored.
|
package net.yested.ext.bootstrap3
import net.yested.core.properties.Property
import net.yested.core.properties.toProperty
import net.yested.ext.bootstrap3.utils.*
import org.junit.Test
import spec.*
/**
* A test for [dateInput], etc.
* @author Eric Pabst ([email protected])
* Date: 9/29/16
* Time: 1:51 PM
*/
class InputTest {
@Test
fun MomentProperty_asText_shouldKeepInSync() {
val builder = FormatStringBuilder()
val formatter = builder.year.fourDigits + "." + builder.month.twoDigits + "." + builder.dayOfMonth.twoDigits
val moment: Property<Moment?> = Moment.now().toProperty()
val thisYear = moment.get()!!.year
val text: Property<String> = moment.asText(formatter.toString())
text.get().mustContain(thisYear.toString())
text.set("2015.12.21")
moment.get()?.year.mustBe(2015)
moment.set(Moment.now())
text.get().mustContain(thisYear.toString())
}
}
|
package net.yested.ext.bootstrap3
import net.yested.core.properties.Property
import net.yested.core.properties.toProperty
import net.yested.ext.bootstrap3.utils.*
import org.junit.Test
import spec.*
/**
* A test for [dateInput], etc.
* @author Eric Pabst ([email protected])
* Date: 9/29/16
* Time: 1:51 PM
*/
class InputTest {
@Test
fun MomentProperty_asText_shouldKeepInSync() {
val builder = FormatStringBuilder()
val formatter = builder.year.fourDigits + "." + builder.month.twoDigits + "." + builder.dayOfMonth.twoDigits
val moment: Property<Moment?> = Moment.now().toProperty()
val thisYear = moment.get()!!.year
val text: Property<String> = moment.asText(formatter.toString())
text.get().mustContain(thisYear.toString())
text.set("2015.12.21")
moment.get()?.year.mustBe(2015)
moment.set(Moment.now())
text.get().mustContain(thisYear.toString())
}
@Test
fun MomentProperty_asText_shouldUseFormatString() {
val builder = FormatStringBuilder()
val inputFormatString = (builder.month.oneDigit + "/" + builder.dayOfMonth.oneDigit + "/" + builder.year.fourDigits).toString()
val moment: Property<Moment?> = Moment.parse("1/30/2015", inputFormatString).toProperty()
val formatter = builder.month.oneDigit + " @ " + builder.dayOfMonth.oneDigit + "!" + builder.year.fourDigits
val text: Property<String> = moment.asText(formatter.toString())
text.get().mustBe("1 @ 30!2015")
}
}
| 12
| 0
| 1
|
add_only
|
--- a/src/test/kotlin/net/yested/ext/bootstrap3/InputTest.kt
+++ b/src/test/kotlin/net/yested/ext/bootstrap3/InputTest.kt
@@ -31,2 +31,14 @@
}
+
+ @Test
+ fun MomentProperty_asText_shouldUseFormatString() {
+ val builder = FormatStringBuilder()
+
+ val inputFormatString = (builder.month.oneDigit + "/" + builder.dayOfMonth.oneDigit + "/" + builder.year.fourDigits).toString()
+ val moment: Property<Moment?> = Moment.parse("1/30/2015", inputFormatString).toProperty()
+
+ val formatter = builder.month.oneDigit + " @ " + builder.dayOfMonth.oneDigit + "!" + builder.year.fourDigits
+ val text: Property<String> = moment.asText(formatter.toString())
+ text.get().mustBe("1 @ 30!2015")
+ }
}
|
--- a/src/test/kotlin/net/yested/ext/bootstrap3/InputTest.kt
+++ b/src/test/kotlin/net/yested/ext/bootstrap3/InputTest.kt
@@ ... @@
}
+
+ @Test
+ fun MomentProperty_asText_shouldUseFormatString() {
+ val builder = FormatStringBuilder()
+
+ val inputFormatString = (builder.month.oneDigit + "/" + builder.dayOfMonth.oneDigit + "/" + builder.year.fourDigits).toString()
+ val moment: Property<Moment?> = Moment.parse("1/30/2015", inputFormatString).toProperty()
+
+ val formatter = builder.month.oneDigit + " @ " + builder.dayOfMonth.oneDigit + "!" + builder.year.fourDigits
+ val text: Property<String> = moment.asText(formatter.toString())
+ text.get().mustBe("1 @ 30!2015")
+ }
}
|
--- a/src/test/kotlin/net/yested/ext/bootstrap3/InputTest.kt
+++ b/src/test/kotlin/net/yested/ext/bootstrap3/InputTest.kt
@@ -31,2 +31,14 @@
CON }
ADD
ADD @Test
ADD fun MomentProperty_asText_shouldUseFormatString() {
ADD val builder = FormatStringBuilder()
ADD
ADD val inputFormatString = (builder.month.oneDigit + "/" + builder.dayOfMonth.oneDigit + "/" + builder.year.fourDigits).toString()
ADD val moment: Property<Moment?> = Moment.parse("1/30/2015", inputFormatString).toProperty()
ADD
ADD val formatter = builder.month.oneDigit + " @ " + builder.dayOfMonth.oneDigit + "!" + builder.year.fourDigits
ADD val text: Property<String> = moment.asText(formatter.toString())
ADD text.get().mustBe("1 @ 30!2015")
ADD }
CON }
|
<<<<<<< SEARCH
text.get().mustContain(thisYear.toString())
}
}
=======
text.get().mustContain(thisYear.toString())
}
@Test
fun MomentProperty_asText_shouldUseFormatString() {
val builder = FormatStringBuilder()
val inputFormatString = (builder.month.oneDigit + "/" + builder.dayOfMonth.oneDigit + "/" + builder.year.fourDigits).toString()
val moment: Property<Moment?> = Moment.parse("1/30/2015", inputFormatString).toProperty()
val formatter = builder.month.oneDigit + " @ " + builder.dayOfMonth.oneDigit + "!" + builder.year.fourDigits
val text: Property<String> = moment.asText(formatter.toString())
text.get().mustBe("1 @ 30!2015")
}
}
>>>>>>> REPLACE
|
angus-ai/angus-sdk-python
|
a53612d5f276180d204378b9e4974fcd812f6a5b
|
tests/fake_camera.py
|
python
|
apache-2.0
|
Add licence header in fake camera test file.
|
from os import listdir
from os.path import isfile, join
class Camera(object):
def __init__(self, path):
self.files = [join(path, f) for f in listdir(path)]
self.files = sorted([f for f in self.files if isfile(f)])
self.current = 0
def reset(self):
self.current = 0
def has_next(self):
return self.current < len(self.files)
def next(self):
img = open(self.files[self.current], 'rb').read()
self.current += 1
return img
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from os import listdir
from os.path import isfile, join
class Camera(object):
def __init__(self, path):
self.files = [join(path, f) for f in listdir(path)]
self.files = sorted([f for f in self.files if isfile(f)])
self.current = 0
def reset(self):
self.current = 0
def has_next(self):
return self.current < len(self.files)
def next(self):
img = open(self.files[self.current], 'rb').read()
self.current += 1
return img
| 19
| 0
| 1
|
add_only
|
--- a/tests/fake_camera.py
+++ b/tests/fake_camera.py
@@ -1 +1,20 @@
+# -*- coding: utf-8 -*-
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
from os import listdir
|
--- a/tests/fake_camera.py
+++ b/tests/fake_camera.py
@@ ... @@
+# -*- coding: utf-8 -*-
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
from os import listdir
|
--- a/tests/fake_camera.py
+++ b/tests/fake_camera.py
@@ -1 +1,20 @@
ADD # -*- coding: utf-8 -*-
ADD
ADD # Licensed to the Apache Software Foundation (ASF) under one
ADD # or more contributor license agreements. See the NOTICE file
ADD # distributed with this work for additional information
ADD # regarding copyright ownership. The ASF licenses this file
ADD # to you under the Apache License, Version 2.0 (the
ADD # "License"); you may not use this file except in compliance
ADD # with the License. You may obtain a copy of the License at
ADD #
ADD # http://www.apache.org/licenses/LICENSE-2.0
ADD #
ADD # Unless required by applicable law or agreed to in writing,
ADD # software distributed under the License is distributed on an
ADD # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
ADD # KIND, either express or implied. See the License for the
ADD # specific language governing permissions and limitations
ADD # under the License.
ADD
CON from os import listdir
|
<<<<<<< SEARCH
from os import listdir
from os.path import isfile, join
=======
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from os import listdir
from os.path import isfile, join
>>>>>>> REPLACE
|
nbigaouette/rust-sorting
|
33390f257dbc8d614f93354d0c7d3196106ea0bd
|
src/efficientsorts/quick.rs
|
rust
|
bsd-3-clause
|
Implement n = 1, n = 2 special cases
|
//! Quicksort algorithm.
//!
//! The `efficient` module contains the efficient sorting algorithm "Quicksort".
//!
//! Source: https://en.wikipedia.org/wiki/Quicksort
/// Quicksort
///
/// # Details
///
///
///
/// # Scaling
///
///
///
/// # Optimizations
///
/// None
///
/// # Notes
///
/// The type T of the vector elements to sort _must_ implement the `PartialOrd` trait so the
/// compiler knows how to compare the elements and sort them.
///
/// # Examples
///
/// ```
/// let mut data: Vec<i32> = vec![4, 2, 3, 1, 5];
/// sorting::efficientsorts::quick::sort(&mut data);
/// assert_eq!(vec![1, 2, 3, 4, 5], data);
/// ```
///
pub fn sort<T: PartialOrd>(array: &mut Vec<T>) {
unimplemented!();
}
|
//! Quicksort algorithm.
//!
//! The `efficient` module contains the efficient sorting algorithm "Quicksort".
//!
//! Source: https://en.wikipedia.org/wiki/Quicksort
/// Quicksort
///
/// # Details
///
///
///
/// # Scaling
///
///
///
/// # Optimizations
///
/// None
///
/// # Notes
///
/// The type T of the vector elements to sort _must_ implement the `PartialOrd` trait so the
/// compiler knows how to compare the elements and sort them.
///
/// # Examples
///
/// ```
/// let mut data: Vec<i32> = vec![4, 2, 3, 1, 5];
/// sorting::efficientsorts::quick::sort(&mut data);
/// assert_eq!(vec![1, 2, 3, 4, 5], data);
/// ```
///
pub fn sort<T: PartialOrd>(array: &mut Vec<T>) {
let n = array.len();
if n <= 1 {
return;
} else if n == 2 {
if array.first() <= array.last() {
return;
} else {
array.swap(0, 1);
return;
}
} else {
unimplemented!();
}
}
| 14
| 1
| 1
|
mixed
|
--- a/src/efficientsorts/quick.rs
+++ b/src/efficientsorts/quick.rs
@@ -35,3 +35,16 @@
pub fn sort<T: PartialOrd>(array: &mut Vec<T>) {
- unimplemented!();
+ let n = array.len();
+
+ if n <= 1 {
+ return;
+ } else if n == 2 {
+ if array.first() <= array.last() {
+ return;
+ } else {
+ array.swap(0, 1);
+ return;
+ }
+ } else {
+ unimplemented!();
+ }
}
|
--- a/src/efficientsorts/quick.rs
+++ b/src/efficientsorts/quick.rs
@@ ... @@
pub fn sort<T: PartialOrd>(array: &mut Vec<T>) {
- unimplemented!();
+ let n = array.len();
+
+ if n <= 1 {
+ return;
+ } else if n == 2 {
+ if array.first() <= array.last() {
+ return;
+ } else {
+ array.swap(0, 1);
+ return;
+ }
+ } else {
+ unimplemented!();
+ }
}
|
--- a/src/efficientsorts/quick.rs
+++ b/src/efficientsorts/quick.rs
@@ -35,3 +35,16 @@
CON pub fn sort<T: PartialOrd>(array: &mut Vec<T>) {
DEL unimplemented!();
ADD let n = array.len();
ADD
ADD if n <= 1 {
ADD return;
ADD } else if n == 2 {
ADD if array.first() <= array.last() {
ADD return;
ADD } else {
ADD array.swap(0, 1);
ADD return;
ADD }
ADD } else {
ADD unimplemented!();
ADD }
CON }
|
<<<<<<< SEARCH
///
pub fn sort<T: PartialOrd>(array: &mut Vec<T>) {
unimplemented!();
}
=======
///
pub fn sort<T: PartialOrd>(array: &mut Vec<T>) {
let n = array.len();
if n <= 1 {
return;
} else if n == 2 {
if array.first() <= array.last() {
return;
} else {
array.swap(0, 1);
return;
}
} else {
unimplemented!();
}
}
>>>>>>> REPLACE
|
aquatir/remember_java_api
|
c7b60080e3bd026ec75721c98e2df356bf22f026
|
code-sample-kotlin/src/codesample/kotlin/sandbox/classes/ClassesSortOfMultipleInheritance.kt
|
kotlin
|
mit
|
Add kotlin object/function extension example
|
package codesample.kotlin.sandbox.classes
interface One {
fun doSomething() {
println("One flying!")
}
}
interface Two {
fun doSomething() {
println("Two flying!")
}
}
/**
* If both interfaces have a method with the same signature, you ccan override it once.
* You can also call respective super methods if required
*/
class OneTwoImplementor: One, Two {
override fun doSomething() {
super<One>.doSomething()
super<Two>.doSomething()
}
}
fun main(args: Array<String>) {
val one: One = OneTwoImplementor()
val two: Two = OneTwoImplementor()
one.doSomething()
two.doSomething()
}
|
package codesample.kotlin.sandbox.classes
interface One {
fun doSomething() {
println("One flying!")
}
}
interface Two {
fun doSomething() {
println("Two flying!")
}
}
/**
* If both interfaces have a method with the same signature, you ccan override it once.
* You can also call respective super methods if required
*/
class OneTwoImplementor: One, Two {
override fun doSomething() {
super<One>.doSomething()
super<Two>.doSomething()
}
/**
* Kotlin's way to static methoods is companion objects!
*/
companion object {
fun behaveLikeStaticButItsNot() {
println("I'm not actually static")
}
}
}
fun main(args: Array<String>) {
val one: One = OneTwoImplementor()
val two: Two = OneTwoImplementor()
val oneTwo: OneTwoImplementor = OneTwoImplementor();
one.doSomething()
two.doSomething()
oneTwo.doSomething()
println(one.yetAnotherInterfaceFunction())
println(oneTwo.yetAnotherInterfaceFunction())
OneTwoImplementor.behaveLikeStaticButItsNot()
// two.yetAnotherInterfaceFunction() DOESN'T WORK!
}
/**
* This is called extensions. We add yet another function to interface, and all classes implementing
* this interface can now use this function. Note: Classes can not override this function
*/
fun One.yetAnotherInterfaceFunction() : String {
return "another interface function"
}
| 26
| 0
| 3
|
add_only
|
--- a/code-sample-kotlin/src/codesample/kotlin/sandbox/classes/ClassesSortOfMultipleInheritance.kt
+++ b/code-sample-kotlin/src/codesample/kotlin/sandbox/classes/ClassesSortOfMultipleInheritance.kt
@@ -24,2 +24,11 @@
}
+
+ /**
+ * Kotlin's way to static methoods is companion objects!
+ */
+ companion object {
+ fun behaveLikeStaticButItsNot() {
+ println("I'm not actually static")
+ }
+ }
}
@@ -30,2 +39,3 @@
val two: Two = OneTwoImplementor()
+ val oneTwo: OneTwoImplementor = OneTwoImplementor();
@@ -33,2 +43,18 @@
two.doSomething()
+ oneTwo.doSomething()
+
+
+ println(one.yetAnotherInterfaceFunction())
+ println(oneTwo.yetAnotherInterfaceFunction())
+ OneTwoImplementor.behaveLikeStaticButItsNot()
+ // two.yetAnotherInterfaceFunction() DOESN'T WORK!
+
}
+
+/**
+ * This is called extensions. We add yet another function to interface, and all classes implementing
+ * this interface can now use this function. Note: Classes can not override this function
+ */
+fun One.yetAnotherInterfaceFunction() : String {
+ return "another interface function"
+}
|
--- a/code-sample-kotlin/src/codesample/kotlin/sandbox/classes/ClassesSortOfMultipleInheritance.kt
+++ b/code-sample-kotlin/src/codesample/kotlin/sandbox/classes/ClassesSortOfMultipleInheritance.kt
@@ ... @@
}
+
+ /**
+ * Kotlin's way to static methoods is companion objects!
+ */
+ companion object {
+ fun behaveLikeStaticButItsNot() {
+ println("I'm not actually static")
+ }
+ }
}
@@ ... @@
val two: Two = OneTwoImplementor()
+ val oneTwo: OneTwoImplementor = OneTwoImplementor();
@@ ... @@
two.doSomething()
+ oneTwo.doSomething()
+
+
+ println(one.yetAnotherInterfaceFunction())
+ println(oneTwo.yetAnotherInterfaceFunction())
+ OneTwoImplementor.behaveLikeStaticButItsNot()
+ // two.yetAnotherInterfaceFunction() DOESN'T WORK!
+
}
+
+/**
+ * This is called extensions. We add yet another function to interface, and all classes implementing
+ * this interface can now use this function. Note: Classes can not override this function
+ */
+fun One.yetAnotherInterfaceFunction() : String {
+ return "another interface function"
+}
|
--- a/code-sample-kotlin/src/codesample/kotlin/sandbox/classes/ClassesSortOfMultipleInheritance.kt
+++ b/code-sample-kotlin/src/codesample/kotlin/sandbox/classes/ClassesSortOfMultipleInheritance.kt
@@ -24,2 +24,11 @@
CON }
ADD
ADD /**
ADD * Kotlin's way to static methoods is companion objects!
ADD */
ADD companion object {
ADD fun behaveLikeStaticButItsNot() {
ADD println("I'm not actually static")
ADD }
ADD }
CON }
@@ -30,2 +39,3 @@
CON val two: Two = OneTwoImplementor()
ADD val oneTwo: OneTwoImplementor = OneTwoImplementor();
CON
@@ -33,2 +43,18 @@
CON two.doSomething()
ADD oneTwo.doSomething()
ADD
ADD
ADD println(one.yetAnotherInterfaceFunction())
ADD println(oneTwo.yetAnotherInterfaceFunction())
ADD OneTwoImplementor.behaveLikeStaticButItsNot()
ADD // two.yetAnotherInterfaceFunction() DOESN'T WORK!
ADD
CON }
ADD
ADD /**
ADD * This is called extensions. We add yet another function to interface, and all classes implementing
ADD * this interface can now use this function. Note: Classes can not override this function
ADD */
ADD fun One.yetAnotherInterfaceFunction() : String {
ADD return "another interface function"
ADD }
|
<<<<<<< SEARCH
super<Two>.doSomething()
}
}
=======
super<Two>.doSomething()
}
/**
* Kotlin's way to static methoods is companion objects!
*/
companion object {
fun behaveLikeStaticButItsNot() {
println("I'm not actually static")
}
}
}
>>>>>>> REPLACE
<<<<<<< SEARCH
val one: One = OneTwoImplementor()
val two: Two = OneTwoImplementor()
one.doSomething()
two.doSomething()
}
=======
val one: One = OneTwoImplementor()
val two: Two = OneTwoImplementor()
val oneTwo: OneTwoImplementor = OneTwoImplementor();
one.doSomething()
two.doSomething()
oneTwo.doSomething()
println(one.yetAnotherInterfaceFunction())
println(oneTwo.yetAnotherInterfaceFunction())
OneTwoImplementor.behaveLikeStaticButItsNot()
// two.yetAnotherInterfaceFunction() DOESN'T WORK!
}
/**
* This is called extensions. We add yet another function to interface, and all classes implementing
* this interface can now use this function. Note: Classes can not override this function
*/
fun One.yetAnotherInterfaceFunction() : String {
return "another interface function"
}
>>>>>>> REPLACE
|
sipXtapi/sipXtapi-svn-mirror
|
52dfd59bea84a6cd434f6192fc812aabbe6dfccb
|
sipXconfig/web/test/org/sipfoundry/sipxconfig/site/phone/polycom/PasswordSettingTestUi.java
|
java
|
lgpl-2.1
|
FIX BUILD: Unit test assumed polycom authid was first visible on page
git-svn-id: f26ccc5efe72c2bd8e1c40f599fe313f2692e4de@7583 a612230a-c5fa-0310-af8b-88eea846685b
|
/*
*
*
* Copyright (C) 2004 SIPfoundry Inc.
* Licensed by SIPfoundry under the LGPL license.
*
* Copyright (C) 2004 Pingtel Corp.
* Licensed to SIPfoundry under a Contributor Agreement.
*
* $
*/
package org.sipfoundry.sipxconfig.site.phone.polycom;
import junit.framework.Test;
import net.sourceforge.jwebunit.WebTestCase;
import org.sipfoundry.sipxconfig.site.SiteTestHelper;
import org.sipfoundry.sipxconfig.site.phone.PhoneTestHelper;
import org.w3c.dom.Element;
public class PasswordSettingTestUi extends WebTestCase {
private PhoneTestHelper m_helper;
public static Test suite() throws Exception {
return SiteTestHelper.webTestSuite(PasswordSettingTestUi.class);
}
protected void setUp() throws Exception {
super.setUp();
getTestContext().setBaseUrl(SiteTestHelper.getBaseUrl());
m_helper = new PhoneTestHelper(tester);
m_helper.reset();
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void testEditSipSetttings() {
m_helper.seedLine(1);
clickLink("ManagePhones");
clickLinkWithText(SiteTestHelper.TEST_USER);
clickLinkWithText("Registration");
Element passwordField = getDialog().getElement("setting:auth.password");
assertEquals("password", passwordField.getAttribute("type"));
}
}
|
/*
*
*
* Copyright (C) 2004 SIPfoundry Inc.
* Licensed by SIPfoundry under the LGPL license.
*
* Copyright (C) 2004 Pingtel Corp.
* Licensed to SIPfoundry under a Contributor Agreement.
*
* $
*/
package org.sipfoundry.sipxconfig.site.phone.polycom;
import junit.framework.Test;
import net.sourceforge.jwebunit.WebTestCase;
import org.sipfoundry.sipxconfig.site.SiteTestHelper;
import org.sipfoundry.sipxconfig.site.phone.PhoneTestHelper;
import org.w3c.dom.Element;
public class PasswordSettingTestUi extends WebTestCase {
private PhoneTestHelper m_helper;
public static Test suite() throws Exception {
return SiteTestHelper.webTestSuite(PasswordSettingTestUi.class);
}
protected void setUp() throws Exception {
super.setUp();
getTestContext().setBaseUrl(SiteTestHelper.getBaseUrl());
m_helper = new PhoneTestHelper(tester);
m_helper.reset();
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void testEditSipSetttings() {
m_helper.seedLine(1);
SiteTestHelper.setScriptingEnabled(true);
clickLink("ManagePhones");
clickLinkWithText(SiteTestHelper.TEST_USER);
clickLinkWithText("Registration");
clickLink("setting:toggle");
Element passwordField = getDialog().getElement("setting:auth.password");
assertEquals("password", passwordField.getAttribute("type"));
}
}
| 2
| 0
| 2
|
add_only
|
--- a/sipXconfig/web/test/org/sipfoundry/sipxconfig/site/phone/polycom/PasswordSettingTestUi.java
+++ b/sipXconfig/web/test/org/sipfoundry/sipxconfig/site/phone/polycom/PasswordSettingTestUi.java
@@ -41,2 +41,3 @@
m_helper.seedLine(1);
+ SiteTestHelper.setScriptingEnabled(true);
clickLink("ManagePhones");
@@ -44,2 +45,3 @@
clickLinkWithText("Registration");
+ clickLink("setting:toggle");
Element passwordField = getDialog().getElement("setting:auth.password");
|
--- a/sipXconfig/web/test/org/sipfoundry/sipxconfig/site/phone/polycom/PasswordSettingTestUi.java
+++ b/sipXconfig/web/test/org/sipfoundry/sipxconfig/site/phone/polycom/PasswordSettingTestUi.java
@@ ... @@
m_helper.seedLine(1);
+ SiteTestHelper.setScriptingEnabled(true);
clickLink("ManagePhones");
@@ ... @@
clickLinkWithText("Registration");
+ clickLink("setting:toggle");
Element passwordField = getDialog().getElement("setting:auth.password");
|
--- a/sipXconfig/web/test/org/sipfoundry/sipxconfig/site/phone/polycom/PasswordSettingTestUi.java
+++ b/sipXconfig/web/test/org/sipfoundry/sipxconfig/site/phone/polycom/PasswordSettingTestUi.java
@@ -41,2 +41,3 @@
CON m_helper.seedLine(1);
ADD SiteTestHelper.setScriptingEnabled(true);
CON clickLink("ManagePhones");
@@ -44,2 +45,3 @@
CON clickLinkWithText("Registration");
ADD clickLink("setting:toggle");
CON Element passwordField = getDialog().getElement("setting:auth.password");
|
<<<<<<< SEARCH
public void testEditSipSetttings() {
m_helper.seedLine(1);
clickLink("ManagePhones");
clickLinkWithText(SiteTestHelper.TEST_USER);
clickLinkWithText("Registration");
Element passwordField = getDialog().getElement("setting:auth.password");
assertEquals("password", passwordField.getAttribute("type"));
=======
public void testEditSipSetttings() {
m_helper.seedLine(1);
SiteTestHelper.setScriptingEnabled(true);
clickLink("ManagePhones");
clickLinkWithText(SiteTestHelper.TEST_USER);
clickLinkWithText("Registration");
clickLink("setting:toggle");
Element passwordField = getDialog().getElement("setting:auth.password");
assertEquals("password", passwordField.getAttribute("type"));
>>>>>>> REPLACE
|
sswierczek/Helix-Movie-Guide-Android
|
4b0478ba8f20af534c1b8fe02eae794a8b8cd0f9
|
app/src/main/kotlin/com/androidmess/helix/discover/view/DiscoverAdapter.kt
|
kotlin
|
apache-2.0
|
Fix issue with blinking list on infinite scroll data reload
|
package com.androidmess.helix.discover.view
import android.support.v7.widget.RecyclerView
import android.view.View
import android.view.ViewGroup
import com.androidmess.helix.R
import com.androidmess.helix.common.ui.recyclerview.RecyclerViewItemSizeCalculator
import com.androidmess.helix.common.ui.view.inflate
import com.androidmess.helix.databinding.DiscoverListItemBinding
import com.androidmess.helix.discover.model.data.DiscoverMovieViewModel
class DiscoverAdapter(private val itemSizeCalculator: RecyclerViewItemSizeCalculator)
: RecyclerView.Adapter<DiscoverAdapter.ViewHolder>() {
private val data: MutableList<DiscoverMovieViewModel> = ArrayList()
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
val view = parent.inflate(R.layout.discover_list_item, false)
val viewParams = view.layoutParams
viewParams.width = itemSizeCalculator.itemWidth
return ViewHolder(view)
}
override fun onBindViewHolder(holder: ViewHolder, position: Int) = holder.bind(data[position])
override fun getItemCount(): Int = data.size
fun addData(movies: List<DiscoverMovieViewModel>) {
data.addAll(movies)
notifyDataSetChanged()
}
class ViewHolder(view: View) : RecyclerView.ViewHolder(view) {
private var binding: DiscoverListItemBinding = DiscoverListItemBinding.bind(view)
fun bind(movieModel: DiscoverMovieViewModel) {
binding.movie = movieModel
}
}
}
|
package com.androidmess.helix.discover.view
import android.support.v7.widget.RecyclerView
import android.view.View
import android.view.ViewGroup
import com.androidmess.helix.R
import com.androidmess.helix.common.ui.recyclerview.RecyclerViewItemSizeCalculator
import com.androidmess.helix.common.ui.view.inflate
import com.androidmess.helix.databinding.DiscoverListItemBinding
import com.androidmess.helix.discover.model.data.DiscoverMovieViewModel
class DiscoverAdapter(private val itemSizeCalculator: RecyclerViewItemSizeCalculator)
: RecyclerView.Adapter<DiscoverAdapter.ViewHolder>() {
private val data: MutableList<DiscoverMovieViewModel> = ArrayList()
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
val view = parent.inflate(R.layout.discover_list_item, false)
val viewParams = view.layoutParams
viewParams.width = itemSizeCalculator.itemWidth
return ViewHolder(view)
}
override fun onBindViewHolder(holder: ViewHolder, position: Int) = holder.bind(data[position])
override fun getItemCount(): Int = data.size
fun addData(movies: List<DiscoverMovieViewModel>) {
data.addAll(movies)
notifyItemRangeInserted(data.size - 1, movies.size)
}
class ViewHolder(view: View) : RecyclerView.ViewHolder(view) {
private var binding: DiscoverListItemBinding = DiscoverListItemBinding.bind(view)
fun bind(movieModel: DiscoverMovieViewModel) {
binding.movie = movieModel
}
}
}
| 1
| 1
| 1
|
mixed
|
--- a/app/src/main/kotlin/com/androidmess/helix/discover/view/DiscoverAdapter.kt
+++ b/app/src/main/kotlin/com/androidmess/helix/discover/view/DiscoverAdapter.kt
@@ -29,3 +29,3 @@
data.addAll(movies)
- notifyDataSetChanged()
+ notifyItemRangeInserted(data.size - 1, movies.size)
}
|
--- a/app/src/main/kotlin/com/androidmess/helix/discover/view/DiscoverAdapter.kt
+++ b/app/src/main/kotlin/com/androidmess/helix/discover/view/DiscoverAdapter.kt
@@ ... @@
data.addAll(movies)
- notifyDataSetChanged()
+ notifyItemRangeInserted(data.size - 1, movies.size)
}
|
--- a/app/src/main/kotlin/com/androidmess/helix/discover/view/DiscoverAdapter.kt
+++ b/app/src/main/kotlin/com/androidmess/helix/discover/view/DiscoverAdapter.kt
@@ -29,3 +29,3 @@
CON data.addAll(movies)
DEL notifyDataSetChanged()
ADD notifyItemRangeInserted(data.size - 1, movies.size)
CON }
|
<<<<<<< SEARCH
fun addData(movies: List<DiscoverMovieViewModel>) {
data.addAll(movies)
notifyDataSetChanged()
}
=======
fun addData(movies: List<DiscoverMovieViewModel>) {
data.addAll(movies)
notifyItemRangeInserted(data.size - 1, movies.size)
}
>>>>>>> REPLACE
|
4minitz/4minitz
|
d995749a6fd4d44cb5fb92a15766c7eeef566f38
|
imports/collections/onlineusers_private.js
|
javascript
|
mit
|
Use `upsert` method of schema class
|
import { Meteor } from 'meteor/meteor';
import { OnlineUsersSchema } from './onlineusers.schema';
import moment from 'moment/moment';
if (Meteor.isServer) {
Meteor.publish('onlineUsersForRoute', function (route) {
return OnlineUsersSchema.find({activeRoute: route});
});
}
const checkRouteParamAndAuthorization = (route, userId) => {
check(route, String);
if (!userId) {
throw new Meteor.Error('not-authorized');
}
};
Meteor.methods({
'onlineUsers.enterRoute'(route) {
const userId = Meteor.userId();
checkRouteParamAndAuthorization(route, userId);
const doc = {
userId: userId,
activeRoute:route,
updatedAt: new Date()
};
const selector = { userId: userId, activeRoute:route };
const existingDoc = OnlineUsersSchema.findOne(selector);
if (existingDoc) {
OnlineUsersSchema.update(selector, doc);
} else {
OnlineUsersSchema.insert(doc);
}
// remove outdated entries
const aMinAgo = moment().add(-1,'minutes').toDate();
OnlineUsersSchema.remove({updatedAt: {"$lt" : aMinAgo}});
},
'onlineUsers.leaveRoute'(route) {
const userId = Meteor.userId();
checkRouteParamAndAuthorization(route, userId);
OnlineUsersSchema.remove({userId: userId, activeRoute:route});
}
});
|
import { Meteor } from 'meteor/meteor';
import { OnlineUsersSchema } from './onlineusers.schema';
import moment from 'moment/moment';
if (Meteor.isServer) {
Meteor.publish('onlineUsersForRoute', function (route) {
return OnlineUsersSchema.find({activeRoute: route});
});
}
const checkRouteParamAndAuthorization = (route, userId) => {
check(route, String);
if (!userId) {
throw new Meteor.Error('not-authorized');
}
};
Meteor.methods({
'onlineUsers.enterRoute'(route) {
const userId = Meteor.userId();
checkRouteParamAndAuthorization(route, userId);
OnlineUsersSchema.upsert(
{ userId: userId, activeRoute:route },
{ updatedAt: new Date() }
);
// remove outdated entries
const aMinAgo = moment().add(-1,'minutes').toDate();
OnlineUsersSchema.remove({updatedAt: {"$lt" : aMinAgo}});
},
'onlineUsers.leaveRoute'(route) {
const userId = Meteor.userId();
checkRouteParamAndAuthorization(route, userId);
OnlineUsersSchema.remove({userId: userId, activeRoute:route});
}
});
| 4
| 13
| 1
|
mixed
|
--- a/imports/collections/onlineusers_private.js
+++ b/imports/collections/onlineusers_private.js
@@ -22,15 +22,6 @@
- const doc = {
- userId: userId,
- activeRoute:route,
- updatedAt: new Date()
- };
- const selector = { userId: userId, activeRoute:route };
- const existingDoc = OnlineUsersSchema.findOne(selector);
-
- if (existingDoc) {
- OnlineUsersSchema.update(selector, doc);
- } else {
- OnlineUsersSchema.insert(doc);
- }
+ OnlineUsersSchema.upsert(
+ { userId: userId, activeRoute:route },
+ { updatedAt: new Date() }
+ );
|
--- a/imports/collections/onlineusers_private.js
+++ b/imports/collections/onlineusers_private.js
@@ ... @@
- const doc = {
- userId: userId,
- activeRoute:route,
- updatedAt: new Date()
- };
- const selector = { userId: userId, activeRoute:route };
- const existingDoc = OnlineUsersSchema.findOne(selector);
-
- if (existingDoc) {
- OnlineUsersSchema.update(selector, doc);
- } else {
- OnlineUsersSchema.insert(doc);
- }
+ OnlineUsersSchema.upsert(
+ { userId: userId, activeRoute:route },
+ { updatedAt: new Date() }
+ );
|
--- a/imports/collections/onlineusers_private.js
+++ b/imports/collections/onlineusers_private.js
@@ -22,15 +22,6 @@
CON
DEL const doc = {
DEL userId: userId,
DEL activeRoute:route,
DEL updatedAt: new Date()
DEL };
DEL const selector = { userId: userId, activeRoute:route };
DEL const existingDoc = OnlineUsersSchema.findOne(selector);
DEL
DEL if (existingDoc) {
DEL OnlineUsersSchema.update(selector, doc);
DEL } else {
DEL OnlineUsersSchema.insert(doc);
DEL }
ADD OnlineUsersSchema.upsert(
ADD { userId: userId, activeRoute:route },
ADD { updatedAt: new Date() }
ADD );
CON
|
<<<<<<< SEARCH
checkRouteParamAndAuthorization(route, userId);
const doc = {
userId: userId,
activeRoute:route,
updatedAt: new Date()
};
const selector = { userId: userId, activeRoute:route };
const existingDoc = OnlineUsersSchema.findOne(selector);
if (existingDoc) {
OnlineUsersSchema.update(selector, doc);
} else {
OnlineUsersSchema.insert(doc);
}
// remove outdated entries
=======
checkRouteParamAndAuthorization(route, userId);
OnlineUsersSchema.upsert(
{ userId: userId, activeRoute:route },
{ updatedAt: new Date() }
);
// remove outdated entries
>>>>>>> REPLACE
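The commit replaces an explicit find-then-update-or-insert sequence with a single upsert call. Below is a rough TypeScript sketch of the logic that an upsert folds away; the Collection interface is hypothetical and stands in for the schema class, not Meteor's real API.

```typescript
// Hypothetical collection interface used only for this illustration.
interface Collection<T extends object> {
  findOne(selector: Partial<T>): T | undefined;
  insert(doc: T): void;
  update(selector: Partial<T>, changes: Partial<T>): void;
}

// What an upsert does conceptually: update the matching document if one
// exists, otherwise insert a new document built from selector + changes.
function upsert<T extends object>(
  coll: Collection<T>,
  selector: Partial<T>,
  changes: Partial<T>
): void {
  if (coll.findOne(selector)) {
    coll.update(selector, changes);
  } else {
    coll.insert({ ...selector, ...changes } as T);
  }
}
```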
|
matthiasbeyer/imag
|
455d6e88ed49c62e100b0153474a18b39fa47b48
|
src/module/bm/header.rs
|
rust
|
lgpl-2.1
|
Use real array instead of text array to save tags
|
use storage::file::FileHeaderSpec as FHS;
use storage::file::FileHeaderData as FHD;
pub fn get_spec() -> FHS {
FHS::Map { keys: vec![ url_key(), tags_key() ] }
}
fn url_key() -> FHS {
FHS::Key { name: String::from("URL"), value_type: Box::new(FHS::Text) }
}
fn tags_key() -> FHS {
FHS::Key { name: String::from("TAGS"), value_type: Box::new(text_array()) }
}
fn text_array() -> FHS {
FHS::Array { allowed_types: vec![FHS::Text] }
}
pub fn build_header(url: &String, tags: &Vec<String>) -> FHD {
FHD::Map {
keys: vec![
FHD::Key {
name: String::from("URL"),
value: Box::new(FHD::Text(url.clone()))
},
FHD::Key {
name: String::from("TAGS"),
value: Box::new(FHD::Text(tags.connect(",")))
}
]
}
}
|
use storage::file::FileHeaderSpec as FHS;
use storage::file::FileHeaderData as FHD;
pub fn get_spec() -> FHS {
FHS::Map { keys: vec![ url_key(), tags_key() ] }
}
fn url_key() -> FHS {
FHS::Key { name: String::from("URL"), value_type: Box::new(FHS::Text) }
}
fn tags_key() -> FHS {
FHS::Key { name: String::from("TAGS"), value_type: Box::new(text_array()) }
}
fn text_array() -> FHS {
FHS::Array { allowed_types: vec![FHS::Text] }
}
pub fn build_header(url: &String, tags: &Vec<String>) -> FHD {
FHD::Map {
keys: vec![
FHD::Key {
name: String::from("URL"),
value: Box::new(FHD::Text(url.clone()))
},
FHD::Key {
name: String::from("TAGS"),
value: Box::new(build_tag_array(tags))
}
]
}
}
fn build_tag_array(tags: &Vec<String>) -> FHD {
let texttags = tags.into_iter().map(|t| FHD::Text(t.clone())).collect();
FHD::Array { values: Box::new(texttags) }
}
| 6
| 1
| 2
|
mixed
|
--- a/src/module/bm/header.rs
+++ b/src/module/bm/header.rs
@@ -29,3 +29,3 @@
name: String::from("TAGS"),
- value: Box::new(FHD::Text(tags.connect(",")))
+ value: Box::new(build_tag_array(tags))
}
@@ -35 +35,6 @@
+fn build_tag_array(tags: &Vec<String>) -> FHD {
+ let texttags = tags.into_iter().map(|t| FHD::Text(t.clone())).collect();
+ FHD::Array { values: Box::new(texttags) }
+}
+
|
--- a/src/module/bm/header.rs
+++ b/src/module/bm/header.rs
@@ ... @@
name: String::from("TAGS"),
- value: Box::new(FHD::Text(tags.connect(",")))
+ value: Box::new(build_tag_array(tags))
}
@@ ... @@
+fn build_tag_array(tags: &Vec<String>) -> FHD {
+ let texttags = tags.into_iter().map(|t| FHD::Text(t.clone())).collect();
+ FHD::Array { values: Box::new(texttags) }
+}
+
|
--- a/src/module/bm/header.rs
+++ b/src/module/bm/header.rs
@@ -29,3 +29,3 @@
CON name: String::from("TAGS"),
DEL value: Box::new(FHD::Text(tags.connect(",")))
ADD value: Box::new(build_tag_array(tags))
CON }
@@ -35 +35,6 @@
CON
ADD fn build_tag_array(tags: &Vec<String>) -> FHD {
ADD let texttags = tags.into_iter().map(|t| FHD::Text(t.clone())).collect();
ADD FHD::Array { values: Box::new(texttags) }
ADD }
ADD
|
<<<<<<< SEARCH
FHD::Key {
name: String::from("TAGS"),
value: Box::new(FHD::Text(tags.connect(",")))
}
]
}
}
=======
FHD::Key {
name: String::from("TAGS"),
value: Box::new(build_tag_array(tags))
}
]
}
}
fn build_tag_array(tags: &Vec<String>) -> FHD {
let texttags = tags.into_iter().map(|t| FHD::Text(t.clone())).collect();
FHD::Array { values: Box::new(texttags) }
}
>>>>>>> REPLACE
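The change stores each tag as its own typed value inside a real array instead of one comma-joined string. A small TypeScript sketch of the two representations, with a tagged union loosely mirroring the FHD::Text / FHD::Array variants (the union itself is an assumption):

```typescript
// Tiny tagged union loosely mirroring the FHD::Text / FHD::Array variants.
type HeaderData =
  | { kind: "text"; value: string }
  | { kind: "array"; values: HeaderData[] };

// Old shape: all tags flattened into one comma-joined text value.
const asJoinedText = (tags: string[]): HeaderData => ({
  kind: "text",
  value: tags.join(","),
});

// New shape: a real array with one text entry per tag, so individual tags
// survive round-tripping even if a tag itself contains a comma.
const asTextArray = (tags: string[]): HeaderData => ({
  kind: "array",
  values: tags.map(t => ({ kind: "text", value: t })),
});
```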
|
RubiginousChanticleer/rubiginouschanticleer
|
c8728e67698b49d3ead29d1d09824af5a218df79
|
server/sessions_users/sessions_users.js
|
javascript
|
mpl-2.0
|
Refactor stub into real query
|
var db = require( '../config/db' );
var helpers = require( '../config/helpers' );
var Sequelize = require( 'sequelize' );
var User = require( '../users/users' );
var Session = require( '../sessions/sessions' );
var Session_User = db.define( 'sessions_users', {
user_id: {
type: Sequelize.INTEGER,
unique: 'session_user_idx'
},
session_id: {
type: Sequelize.INTEGER,
unique: 'session_user_idx'
}
} );
Session_User.sync().then( function(){
console.log("sessions_users table created");
} )
.catch( function( err ){
console.error(err);
} );
Session_User.belongsTo( User, {foreignKey: 'user_id'} );
Session_User.belongsTo( Session, {foreignKey: 'session_id'} );
Session_User.getSessionUserBySessionIdAndUserId = function( sessionID, userID ) {
return Session_User.findOne({where: {session_id: sessionID, user_id: userID} })
.catch( function( err ) {
helpers.errorLogger( err );
});
};
Session_User.countUsersInOneSession = function( sessionID ) {
/* STUB FOR TESTING, REMOVE WHEN THIS FUNCTION IS IMPLEMENTED */
return {
then: function( resolve ) {
if( sessionID == 1 ) {
resolve( 2 );
} else {
resolve( 0 );
}
}
}
/* END STUB */
};
module.exports = Session_User;
|
var db = require( '../config/db' );
var helpers = require( '../config/helpers' );
var Sequelize = require( 'sequelize' );
var User = require( '../users/users' );
var Session = require( '../sessions/sessions' );
var Session_User = db.define( 'sessions_users', {
user_id: {
type: Sequelize.INTEGER,
unique: 'session_user_idx'
},
session_id: {
type: Sequelize.INTEGER,
unique: 'session_user_idx'
}
} );
Session_User.sync().then( function(){
console.log("sessions_users table created");
} )
.catch( function( err ){
console.error(err);
} );
Session_User.belongsTo( User, {foreignKey: 'user_id'} );
Session_User.belongsTo( Session, {foreignKey: 'session_id'} );
Session_User.getSessionUserBySessionIdAndUserId = function( sessionID, userID ) {
return Session_User.findOne({where: {session_id: sessionID, user_id: userID} })
.catch( function( err ) {
helpers.errorLogger( err );
});
};
Session_User.countUsersInOneSession = function( sessionID ) {
return Session_User.count( { where: { id: sessionId } } )
.catch( function( err ) {
helpers.errorLogger( err );
});
};
module.exports = Session_User;
| 4
| 11
| 1
|
mixed
|
--- a/server/sessions_users/sessions_users.js
+++ b/server/sessions_users/sessions_users.js
@@ -35,13 +35,6 @@
Session_User.countUsersInOneSession = function( sessionID ) {
- /* STUB FOR TESTING, REMOVE WHEN THIS FUNCTION IS IMPLEMENTED */
- return {
- then: function( resolve ) {
- if( sessionID == 1 ) {
- resolve( 2 );
- } else {
- resolve( 0 );
- }
- }
- }
- /* END STUB */
+ return Session_User.count( { where: { id: sessionId } } )
+ .catch( function( err ) {
+ helpers.errorLogger( err );
+ });
};
|
--- a/server/sessions_users/sessions_users.js
+++ b/server/sessions_users/sessions_users.js
@@ ... @@
Session_User.countUsersInOneSession = function( sessionID ) {
- /* STUB FOR TESTING, REMOVE WHEN THIS FUNCTION IS IMPLEMENTED */
- return {
- then: function( resolve ) {
- if( sessionID == 1 ) {
- resolve( 2 );
- } else {
- resolve( 0 );
- }
- }
- }
- /* END STUB */
+ return Session_User.count( { where: { id: sessionId } } )
+ .catch( function( err ) {
+ helpers.errorLogger( err );
+ });
};
|
--- a/server/sessions_users/sessions_users.js
+++ b/server/sessions_users/sessions_users.js
@@ -35,13 +35,6 @@
CON Session_User.countUsersInOneSession = function( sessionID ) {
DEL /* STUB FOR TESTING, REMOVE WHEN THIS FUNCTION IS IMPLEMENTED */
DEL return {
DEL then: function( resolve ) {
DEL if( sessionID == 1 ) {
DEL resolve( 2 );
DEL } else {
DEL resolve( 0 );
DEL }
DEL }
DEL }
DEL /* END STUB */
ADD return Session_User.count( { where: { id: sessionId } } )
ADD .catch( function( err ) {
ADD helpers.errorLogger( err );
ADD });
CON };
|
<<<<<<< SEARCH
Session_User.countUsersInOneSession = function( sessionID ) {
/* STUB FOR TESTING, REMOVE WHEN THIS FUNCTION IS IMPLEMENTED */
return {
then: function( resolve ) {
if( sessionID == 1 ) {
resolve( 2 );
} else {
resolve( 0 );
}
}
}
/* END STUB */
};
=======
Session_User.countUsersInOneSession = function( sessionID ) {
return Session_User.count( { where: { id: sessionId } } )
.catch( function( err ) {
helpers.errorLogger( err );
});
};
>>>>>>> REPLACE
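Here the hand-rolled thenable stub gives way to a real ORM count. A hedged TypeScript sketch of the same shape follows; the model interface and the `session_id` column name are assumptions made for illustration, not the project's actual schema.

```typescript
// Hypothetical slice of a Sequelize-like model, for illustration only.
interface SessionUserModel {
  count(options: { where: Record<string, unknown> }): Promise<number>;
}

// Real query: delegate to the ORM and surface failures through catch,
// instead of returning a fake thenable with hard-coded answers.
function countUsersInOneSession(
  model: SessionUserModel,
  sessionId: number
): Promise<number> {
  return model.count({ where: { session_id: sessionId } }).catch(err => {
    console.error(err);
    return 0;
  });
}
```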
|
DanielMartinus/Stepper-Touch
|
5165c5fc8b06ed33cc28bf06de4bcfc5cfc87920
|
app/src/main/java/nl/dionsegijn/steppertouchdemo/recyclerview/RecyclerViewFragment.kt
|
kotlin
|
apache-2.0
|
Add 100 items to the list
|
package nl.dionsegijn.steppertouchdemo.recyclerview
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.recyclerview.widget.LinearLayoutManager
import com.xwray.groupie.GroupieAdapter
import kotlinx.android.synthetic.main.fragment_recycler_view.recyclerView
import nl.dionsegijn.steppertouchdemo.R
import nl.dionsegijn.steppertouchdemo.recyclerview.items.StepperTouchItem
import nl.dionsegijn.steppertouchdemo.recyclerview.items.TextItem
class RecyclerViewFragment : Fragment() {
private val adapter = GroupieAdapter()
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
return inflater.inflate(R.layout.fragment_recycler_view, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
recyclerView.adapter = this.adapter
recyclerView.layoutManager =
LinearLayoutManager(requireContext(), LinearLayoutManager.VERTICAL, false)
adapter.addAll(listOf(
TextItem("First item"),
TextItem("Second item"),
StepperTouchItem(),
TextItem("Fourth item"),
TextItem("Fifth item"),
StepperTouchItem(),
TextItem("Seventh item"),
TextItem("Eight item"),
TextItem("Ninth item"),
TextItem("Tenth item"),
TextItem("Eleven item"),
TextItem("Twelve item"),
StepperTouchItem(),
TextItem("Thirteen item"),
TextItem("Fourteen item"),
))
adapter.notifyDataSetChanged()
}
}
|
package nl.dionsegijn.steppertouchdemo.recyclerview
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.recyclerview.widget.LinearLayoutManager
import com.xwray.groupie.GroupieAdapter
import kotlinx.android.synthetic.main.fragment_recycler_view.recyclerView
import nl.dionsegijn.steppertouch.StepperTouch
import nl.dionsegijn.steppertouchdemo.R
import nl.dionsegijn.steppertouchdemo.recyclerview.items.StepperTouchItem
import nl.dionsegijn.steppertouchdemo.recyclerview.items.TextItem
class RecyclerViewFragment : Fragment() {
private val adapter = GroupieAdapter()
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
return inflater.inflate(R.layout.fragment_recycler_view, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
recyclerView.adapter = this.adapter
recyclerView.layoutManager =
LinearLayoutManager(requireContext(), LinearLayoutManager.VERTICAL, false)
val list = (1..100).map {
if (it % 5 == 0) StepperTouchItem() else TextItem("Item #$it")
}
adapter.addAll(list)
adapter.notifyDataSetChanged()
}
}
| 7
| 17
| 2
|
mixed
|
--- a/app/src/main/java/nl/dionsegijn/steppertouchdemo/recyclerview/RecyclerViewFragment.kt
+++ b/app/src/main/java/nl/dionsegijn/steppertouchdemo/recyclerview/RecyclerViewFragment.kt
@@ -10,2 +10,3 @@
import kotlinx.android.synthetic.main.fragment_recycler_view.recyclerView
+import nl.dionsegijn.steppertouch.StepperTouch
import nl.dionsegijn.steppertouchdemo.R
@@ -31,19 +32,8 @@
- adapter.addAll(listOf(
- TextItem("First item"),
- TextItem("Second item"),
- StepperTouchItem(),
- TextItem("Fourth item"),
- TextItem("Fifth item"),
- StepperTouchItem(),
- TextItem("Seventh item"),
- TextItem("Eight item"),
- TextItem("Ninth item"),
- TextItem("Tenth item"),
- TextItem("Eleven item"),
- TextItem("Twelve item"),
- StepperTouchItem(),
- TextItem("Thirteen item"),
- TextItem("Fourteen item"),
- ))
+
+ val list = (1..100).map {
+ if (it % 5 == 0) StepperTouchItem() else TextItem("Item #$it")
+ }
+
+ adapter.addAll(list)
adapter.notifyDataSetChanged()
|
--- a/app/src/main/java/nl/dionsegijn/steppertouchdemo/recyclerview/RecyclerViewFragment.kt
+++ b/app/src/main/java/nl/dionsegijn/steppertouchdemo/recyclerview/RecyclerViewFragment.kt
@@ ... @@
import kotlinx.android.synthetic.main.fragment_recycler_view.recyclerView
+import nl.dionsegijn.steppertouch.StepperTouch
import nl.dionsegijn.steppertouchdemo.R
@@ ... @@
- adapter.addAll(listOf(
- TextItem("First item"),
- TextItem("Second item"),
- StepperTouchItem(),
- TextItem("Fourth item"),
- TextItem("Fifth item"),
- StepperTouchItem(),
- TextItem("Seventh item"),
- TextItem("Eight item"),
- TextItem("Ninth item"),
- TextItem("Tenth item"),
- TextItem("Eleven item"),
- TextItem("Twelve item"),
- StepperTouchItem(),
- TextItem("Thirteen item"),
- TextItem("Fourteen item"),
- ))
+
+ val list = (1..100).map {
+ if (it % 5 == 0) StepperTouchItem() else TextItem("Item #$it")
+ }
+
+ adapter.addAll(list)
adapter.notifyDataSetChanged()
|
--- a/app/src/main/java/nl/dionsegijn/steppertouchdemo/recyclerview/RecyclerViewFragment.kt
+++ b/app/src/main/java/nl/dionsegijn/steppertouchdemo/recyclerview/RecyclerViewFragment.kt
@@ -10,2 +10,3 @@
CON import kotlinx.android.synthetic.main.fragment_recycler_view.recyclerView
ADD import nl.dionsegijn.steppertouch.StepperTouch
CON import nl.dionsegijn.steppertouchdemo.R
@@ -31,19 +32,8 @@
CON
DEL adapter.addAll(listOf(
DEL TextItem("First item"),
DEL TextItem("Second item"),
DEL StepperTouchItem(),
DEL TextItem("Fourth item"),
DEL TextItem("Fifth item"),
DEL StepperTouchItem(),
DEL TextItem("Seventh item"),
DEL TextItem("Eight item"),
DEL TextItem("Ninth item"),
DEL TextItem("Tenth item"),
DEL TextItem("Eleven item"),
DEL TextItem("Twelve item"),
DEL StepperTouchItem(),
DEL TextItem("Thirteen item"),
DEL TextItem("Fourteen item"),
DEL ))
ADD
ADD val list = (1..100).map {
ADD if (it % 5 == 0) StepperTouchItem() else TextItem("Item #$it")
ADD }
ADD
ADD adapter.addAll(list)
CON adapter.notifyDataSetChanged()
|
<<<<<<< SEARCH
import com.xwray.groupie.GroupieAdapter
import kotlinx.android.synthetic.main.fragment_recycler_view.recyclerView
import nl.dionsegijn.steppertouchdemo.R
import nl.dionsegijn.steppertouchdemo.recyclerview.items.StepperTouchItem
=======
import com.xwray.groupie.GroupieAdapter
import kotlinx.android.synthetic.main.fragment_recycler_view.recyclerView
import nl.dionsegijn.steppertouch.StepperTouch
import nl.dionsegijn.steppertouchdemo.R
import nl.dionsegijn.steppertouchdemo.recyclerview.items.StepperTouchItem
>>>>>>> REPLACE
<<<<<<< SEARCH
LinearLayoutManager(requireContext(), LinearLayoutManager.VERTICAL, false)
adapter.addAll(listOf(
TextItem("First item"),
TextItem("Second item"),
StepperTouchItem(),
TextItem("Fourth item"),
TextItem("Fifth item"),
StepperTouchItem(),
TextItem("Seventh item"),
TextItem("Eight item"),
TextItem("Ninth item"),
TextItem("Tenth item"),
TextItem("Eleven item"),
TextItem("Twelve item"),
StepperTouchItem(),
TextItem("Thirteen item"),
TextItem("Fourteen item"),
))
adapter.notifyDataSetChanged()
}
=======
LinearLayoutManager(requireContext(), LinearLayoutManager.VERTICAL, false)
val list = (1..100).map {
if (it % 5 == 0) StepperTouchItem() else TextItem("Item #$it")
}
adapter.addAll(list)
adapter.notifyDataSetChanged()
}
>>>>>>> REPLACE
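The hard-coded list becomes a generated sequence of 100 entries with every fifth one a stepper. The same construction in TypeScript, with two placeholder item kinds standing in for TextItem and StepperTouchItem:

```typescript
// Placeholder item kinds; the real app uses TextItem / StepperTouchItem.
type Item = { kind: "stepper" } | { kind: "text"; label: string };

// Build 100 items, making every fifth one a stepper, mirroring the Kotlin
// `(1..100).map { ... }` expression.
const items: Item[] = Array.from({ length: 100 }, (_, i) => {
  const n = i + 1;
  return n % 5 === 0
    ? { kind: "stepper" as const }
    : { kind: "text" as const, label: `Item #${n}` };
});
```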
|
pidah/st2contrib
|
d8b477083866a105947281ca34cb6e215417f44d
|
packs/salt/actions/lib/utils.py
|
python
|
apache-2.0
|
Make distinction between local and runner action payload templates.
Add a small description of how the NetAPI payload is sanitized for logging.
|
import yaml
action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
def generate_action(module_type, action):
manifest = action_meta
manifest['name'] = "{0}_{1}".format(module_type, action)
manifest['parameters']['action']['default'] = action
fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
def sanitize_payload(keys_to_sanitize, payload):
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
|
# pylint: disable=line-too-long
import yaml
from .meta import actions
runner_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
local_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"args": {
"type": "array",
"required": False
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Execution modules through Salt API",
"enabled": True,
"entry_point": "local.py"}
def generate_actions():
def create_file(mt, m, a):
manifest = local_action_meta
manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
for key in actions:
map(lambda l: create_file('local', key, l), actions[key])
def sanitize_payload(keys_to_sanitize, payload):
'''
Removes sensitive data from payloads before
publishing to the logs
'''
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
| 41
| 9
| 3
|
mixed
|
--- a/packs/salt/actions/lib/utils.py
+++ b/packs/salt/actions/lib/utils.py
@@ -1,4 +1,7 @@
+# pylint: disable=line-too-long
+
import yaml
+from .meta import actions
-action_meta = {
+runner_action_meta = {
"name": "",
@@ -20,12 +23,37 @@
+local_action_meta = {
+ "name": "",
+ "parameters": {
+ "action": {
+ "type": "string",
+ "immutable": True,
+ "default": ""
+ },
+ "args": {
+ "type": "array",
+ "required": False
+ },
+ "kwargs": {
+ "type": "object",
+ "required": False
+ }
+ },
+ "runner_type": "run-python",
+ "description": "Run Salt Execution modules through Salt API",
+ "enabled": True,
+ "entry_point": "local.py"}
-def generate_action(module_type, action):
- manifest = action_meta
- manifest['name'] = "{0}_{1}".format(module_type, action)
- manifest['parameters']['action']['default'] = action
- fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
- fh.write('---\n')
- fh.write(yaml.dump(manifest, default_flow_style=False))
- fh.close()
+def generate_actions():
+ def create_file(mt, m, a):
+ manifest = local_action_meta
+ manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
+ manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
+
+ fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
+ fh.write('---\n')
+ fh.write(yaml.dump(manifest, default_flow_style=False))
+ fh.close()
+ for key in actions:
+ map(lambda l: create_file('local', key, l), actions[key])
@@ -33,2 +61,6 @@
def sanitize_payload(keys_to_sanitize, payload):
+ '''
+ Removes sensitive data from payloads before
+ publishing to the logs
+ '''
data = payload.copy()
|
--- a/packs/salt/actions/lib/utils.py
+++ b/packs/salt/actions/lib/utils.py
@@ ... @@
+# pylint: disable=line-too-long
+
import yaml
+from .meta import actions
-action_meta = {
+runner_action_meta = {
"name": "",
@@ ... @@
+local_action_meta = {
+ "name": "",
+ "parameters": {
+ "action": {
+ "type": "string",
+ "immutable": True,
+ "default": ""
+ },
+ "args": {
+ "type": "array",
+ "required": False
+ },
+ "kwargs": {
+ "type": "object",
+ "required": False
+ }
+ },
+ "runner_type": "run-python",
+ "description": "Run Salt Execution modules through Salt API",
+ "enabled": True,
+ "entry_point": "local.py"}
-def generate_action(module_type, action):
- manifest = action_meta
- manifest['name'] = "{0}_{1}".format(module_type, action)
- manifest['parameters']['action']['default'] = action
- fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
- fh.write('---\n')
- fh.write(yaml.dump(manifest, default_flow_style=False))
- fh.close()
+def generate_actions():
+ def create_file(mt, m, a):
+ manifest = local_action_meta
+ manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
+ manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
+
+ fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
+ fh.write('---\n')
+ fh.write(yaml.dump(manifest, default_flow_style=False))
+ fh.close()
+ for key in actions:
+ map(lambda l: create_file('local', key, l), actions[key])
@@ ... @@
def sanitize_payload(keys_to_sanitize, payload):
+ '''
+ Removes sensitive data from payloads before
+ publishing to the logs
+ '''
data = payload.copy()
|
--- a/packs/salt/actions/lib/utils.py
+++ b/packs/salt/actions/lib/utils.py
@@ -1,4 +1,7 @@
ADD # pylint: disable=line-too-long
ADD
CON import yaml
ADD from .meta import actions
CON
DEL action_meta = {
ADD runner_action_meta = {
CON "name": "",
@@ -20,12 +23,37 @@
CON
ADD local_action_meta = {
ADD "name": "",
ADD "parameters": {
ADD "action": {
ADD "type": "string",
ADD "immutable": True,
ADD "default": ""
ADD },
ADD "args": {
ADD "type": "array",
ADD "required": False
ADD },
ADD "kwargs": {
ADD "type": "object",
ADD "required": False
ADD }
ADD },
ADD "runner_type": "run-python",
ADD "description": "Run Salt Execution modules through Salt API",
ADD "enabled": True,
ADD "entry_point": "local.py"}
CON
DEL def generate_action(module_type, action):
DEL manifest = action_meta
DEL manifest['name'] = "{0}_{1}".format(module_type, action)
DEL manifest['parameters']['action']['default'] = action
CON
DEL fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
DEL fh.write('---\n')
DEL fh.write(yaml.dump(manifest, default_flow_style=False))
DEL fh.close()
ADD def generate_actions():
ADD def create_file(mt, m, a):
ADD manifest = local_action_meta
ADD manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
ADD manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
ADD
ADD fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
ADD fh.write('---\n')
ADD fh.write(yaml.dump(manifest, default_flow_style=False))
ADD fh.close()
ADD for key in actions:
ADD map(lambda l: create_file('local', key, l), actions[key])
CON
@@ -33,2 +61,6 @@
CON def sanitize_payload(keys_to_sanitize, payload):
ADD '''
ADD Removes sensitive data from payloads before
ADD publishing to the logs
ADD '''
CON data = payload.copy()
|
<<<<<<< SEARCH
import yaml
action_meta = {
"name": "",
"parameters": {
=======
# pylint: disable=line-too-long
import yaml
from .meta import actions
runner_action_meta = {
"name": "",
"parameters": {
>>>>>>> REPLACE
<<<<<<< SEARCH
"entry_point": "runner.py"}
def generate_action(module_type, action):
manifest = action_meta
manifest['name'] = "{0}_{1}".format(module_type, action)
manifest['parameters']['action']['default'] = action
fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
def sanitize_payload(keys_to_sanitize, payload):
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
=======
"entry_point": "runner.py"}
local_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"args": {
"type": "array",
"required": False
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Execution modules through Salt API",
"enabled": True,
"entry_point": "local.py"}
def generate_actions():
def create_file(mt, m, a):
manifest = local_action_meta
manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
for key in actions:
map(lambda l: create_file('local', key, l), actions[key])
def sanitize_payload(keys_to_sanitize, payload):
'''
Removes sensitive data from payloads before
publishing to the logs
'''
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
>>>>>>> REPLACE
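Besides splitting the runner and local templates, the commit documents sanitize_payload, which masks secrets before they reach the logs. A minimal TypeScript equivalent of that masking step (the field names are whatever the caller passes; nothing here is the pack's real API):

```typescript
// Replace each sensitive field with a same-length run of asterisks before
// the payload is logged, mirroring sanitize_payload.
function sanitizePayload(
  keysToSanitize: string[],
  payload: Record<string, string>
): Record<string, string> {
  const data = { ...payload };
  for (const key of keysToSanitize) {
    if (key in data) {
      data[key] = "*".repeat(data[key].length);
    }
  }
  return data;
}

// Example: sanitizePayload(["password"], { username: "bob", password: "hunter2" })
// -> { username: "bob", password: "*******" }
```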
|
crshnburn/rtc-slack-bot
|
f30e75750bb80efb8b922c876b3ec6e9c2e8b433
|
app.js
|
javascript
|
apache-2.0
|
Use attachments for individual work items, all in the same reply.
|
/*eslint-env node*/
//------------------------------------------------------------------------------
// node.js starter application for Bluemix
//------------------------------------------------------------------------------
// cfenv provides access to your Cloud Foundry environment
// for more info, see: https://www.npmjs.com/package/cfenv
var cfenv = require('cfenv');
// get the app environment from Cloud Foundry
var appEnv = cfenv.getAppEnv();
var Botkit = require('botkit')
var controller = Botkit.slackbot({
debug: false
})
var bot = controller.spawn({
token: process.env.BOT_API_TOKEN
}).startRTM(function(err) {
if (err) {
console.error("Bot failed to connect to Slack. Error: " + err)
}
})
controller.hears(['(task|story|epic|defect) (\d*)'],'ambient',function(bot, message){
var matches = message.text.match(/(task|story|epic|defect) (\d*)/ig)
for(var i=0; i < matches.length; i++){
var id = matches[i].split(" ")[1]
bot.reply(message, process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/"+id)
}
})
|
/*eslint-env node*/
//------------------------------------------------------------------------------
// node.js starter application for Bluemix
//------------------------------------------------------------------------------
// cfenv provides access to your Cloud Foundry environment
// for more info, see: https://www.npmjs.com/package/cfenv
var cfenv = require('cfenv');
// get the app environment from Cloud Foundry
var appEnv = cfenv.getAppEnv();
var Botkit = require('botkit')
var controller = Botkit.slackbot({
debug: false
})
var bot = controller.spawn({
token: process.env.BOT_API_TOKEN
}).startRTM(function(err) {
if (err) {
console.error("Bot failed to connect to Slack. Error: " + err)
}
})
controller.hears(['(task|story|epic|defect) (\d*)'],'ambient',function(bot, message){
var matches = message.text.match(/(task|story|epic|defect) (\d*)/ig)
var attachments = [];
for(var i=0; i < matches.length; i++){
var id = matches[i].split(" ")[1]
attachments.push({
"fallback": matches[i],
"color": "#16B8DF",
"title_link": process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/" + id,
"title": "Work Item " + id
})
}
if (attachments.length > 0) {
bot.reply(message, {
"attachments": attachments
})
}
})
| 13
| 1
| 1
|
mixed
|
--- a/app.js
+++ b/app.js
@@ -28,5 +28,17 @@
var matches = message.text.match(/(task|story|epic|defect) (\d*)/ig)
+ var attachments = [];
for(var i=0; i < matches.length; i++){
var id = matches[i].split(" ")[1]
- bot.reply(message, process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/"+id)
+ attachments.push({
+ "fallback": matches[i],
+ "color": "#16B8DF",
+ "title_link": process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/" + id,
+ "title": "Work Item " + id
+
+ })
+ }
+ if (attachments.length > 0) {
+ bot.reply(message, {
+ "attachments": attachments
+ })
}
|
--- a/app.js
+++ b/app.js
@@ ... @@
var matches = message.text.match(/(task|story|epic|defect) (\d*)/ig)
+ var attachments = [];
for(var i=0; i < matches.length; i++){
var id = matches[i].split(" ")[1]
- bot.reply(message, process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/"+id)
+ attachments.push({
+ "fallback": matches[i],
+ "color": "#16B8DF",
+ "title_link": process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/" + id,
+ "title": "Work Item " + id
+
+ })
+ }
+ if (attachments.length > 0) {
+ bot.reply(message, {
+ "attachments": attachments
+ })
}
|
--- a/app.js
+++ b/app.js
@@ -28,5 +28,17 @@
CON var matches = message.text.match(/(task|story|epic|defect) (\d*)/ig)
ADD var attachments = [];
CON for(var i=0; i < matches.length; i++){
CON var id = matches[i].split(" ")[1]
DEL bot.reply(message, process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/"+id)
ADD attachments.push({
ADD "fallback": matches[i],
ADD "color": "#16B8DF",
ADD "title_link": process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/" + id,
ADD "title": "Work Item " + id
ADD
ADD })
ADD }
ADD if (attachments.length > 0) {
ADD bot.reply(message, {
ADD "attachments": attachments
ADD })
CON }
|
<<<<<<< SEARCH
controller.hears(['(task|story|epic|defect) (\d*)'],'ambient',function(bot, message){
var matches = message.text.match(/(task|story|epic|defect) (\d*)/ig)
for(var i=0; i < matches.length; i++){
var id = matches[i].split(" ")[1]
bot.reply(message, process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/"+id)
}
})
=======
controller.hears(['(task|story|epic|defect) (\d*)'],'ambient',function(bot, message){
var matches = message.text.match(/(task|story|epic|defect) (\d*)/ig)
var attachments = [];
for(var i=0; i < matches.length; i++){
var id = matches[i].split(" ")[1]
attachments.push({
"fallback": matches[i],
"color": "#16B8DF",
"title_link": process.env.JAZZ_URI + "/resource/itemName/com.ibm.team.workitem.WorkItem/" + id,
"title": "Work Item " + id
})
}
if (attachments.length > 0) {
bot.reply(message, {
"attachments": attachments
})
}
})
>>>>>>> REPLACE
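The bot now accumulates one attachment per matched work item and replies once with the whole set. A TypeScript sketch of the attachment-building half; the Attachment shape is a trimmed-down assumption of Slack's message attachment fields.

```typescript
// Trimmed-down attachment shape, for illustration.
interface Attachment {
  fallback: string;
  color: string;
  title: string;
  title_link: string;
}

// Turn each "defect 123"-style match into one attachment; the caller then
// sends a single reply containing the whole array.
function buildAttachments(matches: string[], jazzUri: string): Attachment[] {
  return matches.map(match => {
    const id = match.split(" ")[1];
    return {
      fallback: match,
      color: "#16B8DF",
      title: `Work Item ${id}`,
      title_link: `${jazzUri}/resource/itemName/com.ibm.team.workitem.WorkItem/${id}`,
    };
  });
}
```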
|
ibinti/intellij-community
|
2b896b891010fca83736ec459c3642bf04115c33
|
plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenProblemFileHighlighter.java
|
java
|
apache-2.0
|
Remove unnecessary acquisition of the read action lock.
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.utils;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import org.jetbrains.idea.maven.dom.MavenDomUtil;
public class MavenProblemFileHighlighter implements Condition<VirtualFile> {
private final Project myProject;
public MavenProblemFileHighlighter(Project project) {
myProject = project;
}
public boolean value(final VirtualFile file) {
AccessToken accessToken = ApplicationManager.getApplication().acquireReadActionLock();
try {
PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
}
finally {
accessToken.finish();
}
}
}
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.utils;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import org.jetbrains.idea.maven.dom.MavenDomUtil;
public class MavenProblemFileHighlighter implements Condition<VirtualFile> {
private final Project myProject;
public MavenProblemFileHighlighter(Project project) {
myProject = project;
}
public boolean value(final VirtualFile file) {
PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
}
}
| 2
| 10
| 2
|
mixed
|
--- a/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenProblemFileHighlighter.java
+++ b/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenProblemFileHighlighter.java
@@ -17,4 +17,2 @@
-import com.intellij.openapi.application.AccessToken;
-import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
@@ -34,10 +32,4 @@
public boolean value(final VirtualFile file) {
- AccessToken accessToken = ApplicationManager.getApplication().acquireReadActionLock();
- try {
- PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
- return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
- }
- finally {
- accessToken.finish();
- }
+ PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
+ return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
}
|
--- a/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenProblemFileHighlighter.java
+++ b/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenProblemFileHighlighter.java
@@ ... @@
-import com.intellij.openapi.application.AccessToken;
-import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
@@ ... @@
public boolean value(final VirtualFile file) {
- AccessToken accessToken = ApplicationManager.getApplication().acquireReadActionLock();
- try {
- PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
- return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
- }
- finally {
- accessToken.finish();
- }
+ PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
+ return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
}
|
--- a/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenProblemFileHighlighter.java
+++ b/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenProblemFileHighlighter.java
@@ -17,4 +17,2 @@
CON
DEL import com.intellij.openapi.application.AccessToken;
DEL import com.intellij.openapi.application.ApplicationManager;
CON import com.intellij.openapi.project.Project;
@@ -34,10 +32,4 @@
CON public boolean value(final VirtualFile file) {
DEL AccessToken accessToken = ApplicationManager.getApplication().acquireReadActionLock();
DEL try {
DEL PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
DEL return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
DEL }
DEL finally {
DEL accessToken.finish();
DEL }
ADD PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
ADD return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
CON }
|
<<<<<<< SEARCH
package org.jetbrains.idea.maven.utils;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
=======
package org.jetbrains.idea.maven.utils;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
>>>>>>> REPLACE
<<<<<<< SEARCH
public boolean value(final VirtualFile file) {
AccessToken accessToken = ApplicationManager.getApplication().acquireReadActionLock();
try {
PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
}
finally {
accessToken.finish();
}
}
}
=======
public boolean value(final VirtualFile file) {
PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
return psiFile != null && MavenDomUtil.isMavenFile(psiFile);
}
}
>>>>>>> REPLACE
|
sussol/mobile
|
54e1031eb6009a06e4dfbac062b96aa73f5099fb
|
src/actions/Entities/LocationActions.js
|
javascript
|
mit
|
Add location action updates for saving and resetting
|
import { generateUUID } from 'react-native-database';
import { selectNewLocationId } from '../../selectors/Entities/location';
export const LOCATION_ACTIONS = {
CREATE: 'LOCATION/create',
UPDATE: 'LOCATION/update',
SAVE_NEW: 'LOCATION/saveNew',
};
const createDefaultLocation = () => ({
id: generateUUID(),
description: '',
code: '',
});
const create = () => ({
type: LOCATION_ACTIONS.CREATE,
payload: createDefaultLocation(),
});
const update = (id, field, value) => ({
type: LOCATION_ACTIONS.UPDATE,
payload: { id, field, value },
});
const saveNew = location => ({
type: LOCATION_ACTIONS.SAVE_NEW,
payload: { location },
});
const updateNew = (value, field) => (dispatch, getState) => {
const newLocationId = selectNewLocationId(getState());
dispatch(update(newLocationId, field, value));
};
export const LocationActions = {
create,
update,
updateNew,
saveNew,
};
|
import { generateUUID } from 'react-native-database';
import { selectNewLocationId } from '../../selectors/Entities/location';
export const LOCATION_ACTIONS = {
CREATE: 'LOCATION/create',
UPDATE: 'LOCATION/update',
SAVE_NEW: 'LOCATION/saveNew',
SAVE_EDITING: 'LOCATION/saveEditing',
RESET: 'LOCATION/reset',
};
const createDefaultLocation = () => ({
id: generateUUID(),
description: '',
code: '',
});
const create = () => ({
type: LOCATION_ACTIONS.CREATE,
payload: { location: createDefaultLocation() },
});
const reset = () => ({
type: LOCATION_ACTIONS.RESET,
});
const update = (id, field, value) => ({
type: LOCATION_ACTIONS.UPDATE,
payload: { id, field, value },
});
const saveNew = location => ({
type: LOCATION_ACTIONS.SAVE_NEW,
payload: { location },
});
const saveEditing = location => ({
type: LOCATION_ACTIONS.SAVE_EDITING,
payload: { location },
});
const updateNew = (value, field) => (dispatch, getState) => {
const newLocationId = selectNewLocationId(getState());
dispatch(update(newLocationId, field, value));
};
export const LocationActions = {
create,
update,
updateNew,
saveNew,
saveEditing,
reset,
};
| 14
| 1
| 4
|
mixed
|
--- a/src/actions/Entities/LocationActions.js
+++ b/src/actions/Entities/LocationActions.js
@@ -7,2 +7,4 @@
SAVE_NEW: 'LOCATION/saveNew',
+ SAVE_EDITING: 'LOCATION/saveEditing',
+ RESET: 'LOCATION/reset',
};
@@ -17,3 +19,7 @@
type: LOCATION_ACTIONS.CREATE,
- payload: createDefaultLocation(),
+ payload: { location: createDefaultLocation() },
+});
+
+const reset = () => ({
+ type: LOCATION_ACTIONS.RESET,
});
@@ -27,2 +33,7 @@
type: LOCATION_ACTIONS.SAVE_NEW,
+ payload: { location },
+});
+
+const saveEditing = location => ({
+ type: LOCATION_ACTIONS.SAVE_EDITING,
payload: { location },
@@ -40,2 +51,4 @@
saveNew,
+ saveEditing,
+ reset,
};
|
--- a/src/actions/Entities/LocationActions.js
+++ b/src/actions/Entities/LocationActions.js
@@ ... @@
SAVE_NEW: 'LOCATION/saveNew',
+ SAVE_EDITING: 'LOCATION/saveEditing',
+ RESET: 'LOCATION/reset',
};
@@ ... @@
type: LOCATION_ACTIONS.CREATE,
- payload: createDefaultLocation(),
+ payload: { location: createDefaultLocation() },
+});
+
+const reset = () => ({
+ type: LOCATION_ACTIONS.RESET,
});
@@ ... @@
type: LOCATION_ACTIONS.SAVE_NEW,
+ payload: { location },
+});
+
+const saveEditing = location => ({
+ type: LOCATION_ACTIONS.SAVE_EDITING,
payload: { location },
@@ ... @@
saveNew,
+ saveEditing,
+ reset,
};
|
--- a/src/actions/Entities/LocationActions.js
+++ b/src/actions/Entities/LocationActions.js
@@ -7,2 +7,4 @@
CON SAVE_NEW: 'LOCATION/saveNew',
ADD SAVE_EDITING: 'LOCATION/saveEditing',
ADD RESET: 'LOCATION/reset',
CON };
@@ -17,3 +19,7 @@
CON type: LOCATION_ACTIONS.CREATE,
DEL payload: createDefaultLocation(),
ADD payload: { location: createDefaultLocation() },
ADD });
ADD
ADD const reset = () => ({
ADD type: LOCATION_ACTIONS.RESET,
CON });
@@ -27,2 +33,7 @@
CON type: LOCATION_ACTIONS.SAVE_NEW,
ADD payload: { location },
ADD });
ADD
ADD const saveEditing = location => ({
ADD type: LOCATION_ACTIONS.SAVE_EDITING,
CON payload: { location },
@@ -40,2 +51,4 @@
CON saveNew,
ADD saveEditing,
ADD reset,
CON };
|
<<<<<<< SEARCH
UPDATE: 'LOCATION/update',
SAVE_NEW: 'LOCATION/saveNew',
};
=======
UPDATE: 'LOCATION/update',
SAVE_NEW: 'LOCATION/saveNew',
SAVE_EDITING: 'LOCATION/saveEditing',
RESET: 'LOCATION/reset',
};
>>>>>>> REPLACE
<<<<<<< SEARCH
const create = () => ({
type: LOCATION_ACTIONS.CREATE,
payload: createDefaultLocation(),
});
=======
const create = () => ({
type: LOCATION_ACTIONS.CREATE,
payload: { location: createDefaultLocation() },
});
const reset = () => ({
type: LOCATION_ACTIONS.RESET,
});
>>>>>>> REPLACE
<<<<<<< SEARCH
const saveNew = location => ({
type: LOCATION_ACTIONS.SAVE_NEW,
payload: { location },
});
=======
const saveNew = location => ({
type: LOCATION_ACTIONS.SAVE_NEW,
payload: { location },
});
const saveEditing = location => ({
type: LOCATION_ACTIONS.SAVE_EDITING,
payload: { location },
});
>>>>>>> REPLACE
<<<<<<< SEARCH
updateNew,
saveNew,
};
=======
updateNew,
saveNew,
saveEditing,
reset,
};
>>>>>>> REPLACE
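The commit adds saveEditing and reset action creators alongside the existing ones and wraps the create payload in a `{ location }` object. A short TypeScript sketch of the same action-creator pattern; the Location shape and constant names are illustrative, not the app's exact types.

```typescript
// Illustrative location shape and the two new action types.
interface Location {
  id: string;
  description: string;
  code: string;
}

const LOCATION_ACTIONS = {
  SAVE_EDITING: "LOCATION/saveEditing",
  RESET: "LOCATION/reset",
} as const;

// Plain Redux-style action creators: saveEditing carries the edited record,
// reset carries no payload at all.
const saveEditing = (location: Location) => ({
  type: LOCATION_ACTIONS.SAVE_EDITING,
  payload: { location },
});

const reset = () => ({ type: LOCATION_ACTIONS.RESET });
```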
|
InseeFr/Pogues-Back-Office
|
fd6fd3e24277010aec1064a542d7c43b3d3cdd65
|
src/main/java/fr/insee/pogues/transforms/StromaeServiceImpl.java
|
java
|
mit
|
Use value declarations instead of env injection
|
package fr.insee.pogues.transforms;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.nio.charset.StandardCharsets;
import java.util.Map;
@Service
public class StromaeServiceImpl implements StromaeService {
@Autowired
Environment env;
@Autowired
HttpClient httpClient;
private String serviceUri;
@PostConstruct
public void setUp(){
serviceUri = env.getProperty("fr.insee.pogues.api.remote.stromae.vis.url");
}
@Override
public String transform(String input, Map<String, Object> params) throws Exception {
try {
String uri = String.format("%s/%s", serviceUri,
params.get("name"));
HttpPost post = new HttpPost(uri);
post.setEntity(new StringEntity(input, StandardCharsets.UTF_8));
post.setHeader("Content-type", "application/xml");
HttpResponse response = httpClient.execute(post);
return EntityUtils.toString(response.getEntity());
} catch(Exception e) {
throw e;
}
}
}
|
package fr.insee.pogues.transforms;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.nio.charset.StandardCharsets;
import java.util.Map;
@Service
public class StromaeServiceImpl implements StromaeService {
@Autowired
HttpClient httpClient;
@Value("${fr.insee.pogues.api.remote.stromae.vis.url}")
private String serviceUri;
@Override
public String transform(String input, Map<String, Object> params) throws Exception {
try {
String uri = String.format("%s/%s", serviceUri,
params.get("name"));
HttpPost post = new HttpPost(uri);
post.setEntity(new StringEntity(input, StandardCharsets.UTF_8));
post.setHeader("Content-type", "application/xml");
HttpResponse response = httpClient.execute(post);
return EntityUtils.toString(response.getEntity());
} catch(Exception e) {
throw e;
}
}
}
| 2
| 10
| 2
|
mixed
|
--- a/src/main/java/fr/insee/pogues/transforms/StromaeServiceImpl.java
+++ b/src/main/java/fr/insee/pogues/transforms/StromaeServiceImpl.java
@@ -8,6 +8,5 @@
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.core.env.Environment;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
-import javax.annotation.PostConstruct;
import java.nio.charset.StandardCharsets;
@@ -19,13 +18,6 @@
@Autowired
- Environment env;
-
- @Autowired
HttpClient httpClient;
+ @Value("${fr.insee.pogues.api.remote.stromae.vis.url}")
private String serviceUri;
-
- @PostConstruct
- public void setUp(){
- serviceUri = env.getProperty("fr.insee.pogues.api.remote.stromae.vis.url");
- }
|
--- a/src/main/java/fr/insee/pogues/transforms/StromaeServiceImpl.java
+++ b/src/main/java/fr/insee/pogues/transforms/StromaeServiceImpl.java
@@ ... @@
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.core.env.Environment;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
-import javax.annotation.PostConstruct;
import java.nio.charset.StandardCharsets;
@@ ... @@
@Autowired
- Environment env;
-
- @Autowired
HttpClient httpClient;
+ @Value("${fr.insee.pogues.api.remote.stromae.vis.url}")
private String serviceUri;
-
- @PostConstruct
- public void setUp(){
- serviceUri = env.getProperty("fr.insee.pogues.api.remote.stromae.vis.url");
- }
|
--- a/src/main/java/fr/insee/pogues/transforms/StromaeServiceImpl.java
+++ b/src/main/java/fr/insee/pogues/transforms/StromaeServiceImpl.java
@@ -8,6 +8,5 @@
CON import org.springframework.beans.factory.annotation.Autowired;
DEL import org.springframework.core.env.Environment;
ADD import org.springframework.beans.factory.annotation.Value;
CON import org.springframework.stereotype.Service;
CON
DEL import javax.annotation.PostConstruct;
CON import java.nio.charset.StandardCharsets;
@@ -19,13 +18,6 @@
CON @Autowired
DEL Environment env;
DEL
DEL @Autowired
CON HttpClient httpClient;
CON
ADD @Value("${fr.insee.pogues.api.remote.stromae.vis.url}")
CON private String serviceUri;
DEL
DEL @PostConstruct
DEL public void setUp(){
DEL serviceUri = env.getProperty("fr.insee.pogues.api.remote.stromae.vis.url");
DEL }
CON
|
<<<<<<< SEARCH
import org.apache.http.util.EntityUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.nio.charset.StandardCharsets;
import java.util.Map;
=======
import org.apache.http.util.EntityUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.nio.charset.StandardCharsets;
import java.util.Map;
>>>>>>> REPLACE
<<<<<<< SEARCH
@Autowired
Environment env;
@Autowired
HttpClient httpClient;
private String serviceUri;
@PostConstruct
public void setUp(){
serviceUri = env.getProperty("fr.insee.pogues.api.remote.stromae.vis.url");
}
@Override
=======
@Autowired
HttpClient httpClient;
@Value("${fr.insee.pogues.api.remote.stromae.vis.url}")
private String serviceUri;
@Override
>>>>>>> REPLACE
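The service now receives the configured URI directly instead of pulling it from the Environment in a @PostConstruct hook. A rough TypeScript analogue of the idea (configuration supplied at construction time rather than looked up during init); the class and values below are made up for the sketch.

```typescript
// The resolved setting is handed in once, at construction time.
class StromaeService {
  constructor(private readonly serviceUri: string) {}

  buildTargetUri(name: string): string {
    return `${this.serviceUri}/${name}`;
  }
}

// Wiring: whoever assembles the service resolves the property up front,
// so the class itself never touches the environment.
const service = new StromaeService("https://example.invalid/stromae/vis");
console.log(service.buildTargetUri("questionnaire-1"));
```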
|
ecmwf/cfgrib
|
68eb1bd58b84c1937f6f8d15bb9ea9f02a402e22
|
tests/cdscommon.py
|
python
|
apache-2.0
|
Drop code that is impossible to get right.
|
import hashlib
import os
import shutil
import cdsapi
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'}
def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'):
request_text = str(sorted(request.items())).encode('utf-8')
uuid = hashlib.sha3_224(request_text).hexdigest()[:10]
format = request.get('format', 'grib')
ext = EXTENSIONS.get(format, '.bin')
name = name.format(**locals())
path = os.path.join(SAMPLE_DATA_FOLDER, name)
if not os.path.exists(path):
c = cdsapi.Client()
try:
c.retrieve(dataset, request, target=path + '.tmp')
shutil.move(path + '.tmp', path)
except:
os.unlink(path + '.tmp')
raise
return path
def message_count(dataset, request, count=1):
if dataset == 'reanalysis-era5-single-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'reanalysis-era5-pressure-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'seasonal-original-single-levels':
count = 51
elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
count = 51
for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
value = request.get(key)
if isinstance(value, list):
count *= len(value)
return count
|
import hashlib
import os
import shutil
import cdsapi
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'}
def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'):
request_text = str(sorted(request.items())).encode('utf-8')
uuid = hashlib.sha3_224(request_text).hexdigest()[:10]
format = request.get('format', 'grib')
ext = EXTENSIONS.get(format, '.bin')
name = name.format(**locals())
path = os.path.join(SAMPLE_DATA_FOLDER, name)
if not os.path.exists(path):
c = cdsapi.Client()
try:
c.retrieve(dataset, request, target=path + '.tmp')
shutil.move(path + '.tmp', path)
except:
os.unlink(path + '.tmp')
raise
return path
| 0
| 18
| 1
|
del_only
|
--- a/tests/cdscommon.py
+++ b/tests/cdscommon.py
@@ -27,19 +27 @@
return path
-
-
-def message_count(dataset, request, count=1):
- if dataset == 'reanalysis-era5-single-levels' \
- and request.get('product_type') == 'ensemble_members':
- count = 20
- elif dataset == 'reanalysis-era5-pressure-levels' \
- and request.get('product_type') == 'ensemble_members':
- count = 20
- elif dataset == 'seasonal-original-single-levels':
- count = 51
- elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
- count = 51
- for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
- value = request.get(key)
- if isinstance(value, list):
- count *= len(value)
- return count
|
--- a/tests/cdscommon.py
+++ b/tests/cdscommon.py
@@ ... @@
return path
-
-
-def message_count(dataset, request, count=1):
- if dataset == 'reanalysis-era5-single-levels' \
- and request.get('product_type') == 'ensemble_members':
- count = 20
- elif dataset == 'reanalysis-era5-pressure-levels' \
- and request.get('product_type') == 'ensemble_members':
- count = 20
- elif dataset == 'seasonal-original-single-levels':
- count = 51
- elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
- count = 51
- for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
- value = request.get(key)
- if isinstance(value, list):
- count *= len(value)
- return count
|
--- a/tests/cdscommon.py
+++ b/tests/cdscommon.py
@@ -27,19 +27 @@
CON return path
DEL
DEL
DEL def message_count(dataset, request, count=1):
DEL if dataset == 'reanalysis-era5-single-levels' \
DEL and request.get('product_type') == 'ensemble_members':
DEL count = 20
DEL elif dataset == 'reanalysis-era5-pressure-levels' \
DEL and request.get('product_type') == 'ensemble_members':
DEL count = 20
DEL elif dataset == 'seasonal-original-single-levels':
DEL count = 51
DEL elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
DEL count = 51
DEL for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
DEL value = request.get(key)
DEL if isinstance(value, list):
DEL count *= len(value)
DEL return count
|
<<<<<<< SEARCH
raise
return path
def message_count(dataset, request, count=1):
if dataset == 'reanalysis-era5-single-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'reanalysis-era5-pressure-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'seasonal-original-single-levels':
count = 51
elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
count = 51
for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
value = request.get(key)
if isinstance(value, list):
count *= len(value)
return count
=======
raise
return path
>>>>>>> REPLACE
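The deleted message_count helper is removed without replacement; what remains is the ensure_data download cache. For readers skimming the record, here is a hedged TypeScript sketch of that cache-then-rename pattern (write to a temp file, move it into place on success, clean up on failure); the download callback is an assumption, not the cdsapi client.

```typescript
import * as fs from "node:fs";

// Download-once cache: fetch into a temp file, atomically rename on success,
// and remove the partial file if the download fails.
async function ensureFile(
  path: string,
  download: (target: string) => Promise<void>
): Promise<string> {
  if (!fs.existsSync(path)) {
    const tmp = `${path}.tmp`;
    try {
      await download(tmp);
      fs.renameSync(tmp, path);
    } catch (err) {
      if (fs.existsSync(tmp)) {
        fs.unlinkSync(tmp); // avoid leaving a half-written file behind
      }
      throw err;
    }
  }
  return path;
}
```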
|
dropbox/changes
|
dfd3bff4560d1711624b8508795eb3debbaafa40
|
changes/api/snapshotimage_details.py
|
python
|
apache-2.0
|
Mark snapshots as inactive if any are not valid
|
from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
|
from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
elif snapshot.status == SnapshotStatus.active:
snapshot.status = SnapshotStatus.inactive
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
| 3
| 0
| 1
|
add_only
|
--- a/changes/api/snapshotimage_details.py
+++ b/changes/api/snapshotimage_details.py
@@ -42,2 +42,5 @@
db.session.add(snapshot)
+ elif snapshot.status == SnapshotStatus.active:
+ snapshot.status = SnapshotStatus.inactive
+ db.session.add(snapshot)
|
--- a/changes/api/snapshotimage_details.py
+++ b/changes/api/snapshotimage_details.py
@@ ... @@
db.session.add(snapshot)
+ elif snapshot.status == SnapshotStatus.active:
+ snapshot.status = SnapshotStatus.inactive
+ db.session.add(snapshot)
|
--- a/changes/api/snapshotimage_details.py
+++ b/changes/api/snapshotimage_details.py
@@ -42,2 +42,5 @@
CON db.session.add(snapshot)
ADD elif snapshot.status == SnapshotStatus.active:
ADD snapshot.status = SnapshotStatus.inactive
ADD db.session.add(snapshot)
CON
|
<<<<<<< SEARCH
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
db.session.commit()
=======
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
elif snapshot.status == SnapshotStatus.active:
snapshot.status = SnapshotStatus.inactive
db.session.add(snapshot)
db.session.commit()
>>>>>>> REPLACE
|
jwittevrongel/playchaser
|
bdeafec96e4ffa90ba1662176433440907a7e699
|
app/lib/db/MigrationManager.js
|
javascript
|
mit
|
Add migration creation logic to MM.create()
|
"use strict";
var fs = require('fs'),
path = require('path');
exports.up = function(migrationName) {
};
exports.down = function(migrationName) {
};
exports.create = function(migrationName) {
var migrationTemplate = [
'"use strict"',
'',
'exports.up = function(mongoose, next) {',
' next();',
'}',
'',
'exports.down = function(mongoose, next) {',
' next();',
'}',
''
].join('\n');
};
|
"use strict";
var fs = require('fs'),
path = require('path');
var baseDirectory = path.join(__dirname, '..', '..');
var relativeMigrationDirectory = path.join('db', 'migrations');
var absoluteMigrationDirectory = path.join(baseDirectory, relativeMigrationDirectory);
function padNumeral(numeral) {
return Array(7 - numeral.toString().length).join('0') + numeral;
};
function makeNameFilename(migrationName) {
return migrationName.replace(/\s+/g, '-');
};
exports.up = function(migrationName) {
};
exports.down = function(migrationName) {
};
exports.create = function(migrationName) {
var migrationTemplate = [
'"use strict"',
'',
'exports.up = function(mongoose, next) {',
' next();',
'}',
'',
'exports.down = function(mongoose, next) {',
' next();',
'}',
''
].join('\n');
try {
fs.mkdirSync(absoluteMigrationDirectory, parseInt('0775', 8));
} catch (err) {
// ignore error creating directory
}
var existingMigrationOrdinals = fs.readdirSync(absoluteMigrationDirectory).map(function(filename) {
return parseInt(filename.match(/^(\d+)/)[1], 10);
}).sort(function(a, b) {
return a - b;
});
var nextOrdinal = (existingMigrationOrdinals.pop() || 0) + 1;
var fileName = padNumeral(nextOrdinal) + "-" + makeNameFilename(migrationName) + ".js";
var absoluteFileName = path.join(absoluteMigrationDirectory, fileName);
fs.writeFileSync(absoluteFileName, migrationTemplate, {mode: parseInt('0664', 8)});
console.log("New migration created: " + path.join(relativeMigrationDirectory, fileName));
};
| 44
| 17
| 2
|
mixed
|
--- a/app/lib/db/MigrationManager.js
+++ b/app/lib/db/MigrationManager.js
@@ -4,3 +4,15 @@
path = require('path');
-
+
+var baseDirectory = path.join(__dirname, '..', '..');
+var relativeMigrationDirectory = path.join('db', 'migrations');
+var absoluteMigrationDirectory = path.join(baseDirectory, relativeMigrationDirectory);
+
+function padNumeral(numeral) {
+ return Array(7 - numeral.toString().length).join('0') + numeral;
+};
+
+function makeNameFilename(migrationName) {
+ return migrationName.replace(/\s+/g, '-');
+};
+
exports.up = function(migrationName) {
@@ -12,20 +24,35 @@
exports.create = function(migrationName) {
- var migrationTemplate = [
- '"use strict"',
- '',
- 'exports.up = function(mongoose, next) {',
- ' next();',
- '}',
- '',
- 'exports.down = function(mongoose, next) {',
- ' next();',
- '}',
- ''
- ].join('\n');
-
-
-
+ var migrationTemplate = [
+ '"use strict"',
+ '',
+ 'exports.up = function(mongoose, next) {',
+ ' next();',
+ '}',
+ '',
+ 'exports.down = function(mongoose, next) {',
+ ' next();',
+ '}',
+ ''
+ ].join('\n');
+
+ try {
+ fs.mkdirSync(absoluteMigrationDirectory, parseInt('0775', 8));
+ } catch (err) {
+ // ignore error creating directory
+ }
+
+ var existingMigrationOrdinals = fs.readdirSync(absoluteMigrationDirectory).map(function(filename) {
+ return parseInt(filename.match(/^(\d+)/)[1], 10);
+ }).sort(function(a, b) {
+ return a - b;
+ });
+
+ var nextOrdinal = (existingMigrationOrdinals.pop() || 0) + 1;
+ var fileName = padNumeral(nextOrdinal) + "-" + makeNameFilename(migrationName) + ".js";
+ var absoluteFileName = path.join(absoluteMigrationDirectory, fileName);
+ fs.writeFileSync(absoluteFileName, migrationTemplate, {mode: parseInt('0664', 8)});
+ console.log("New migration created: " + path.join(relativeMigrationDirectory, fileName));
};
-
+
|
--- a/app/lib/db/MigrationManager.js
+++ b/app/lib/db/MigrationManager.js
@@ ... @@
path = require('path');
-
+
+var baseDirectory = path.join(__dirname, '..', '..');
+var relativeMigrationDirectory = path.join('db', 'migrations');
+var absoluteMigrationDirectory = path.join(baseDirectory, relativeMigrationDirectory);
+
+function padNumeral(numeral) {
+ return Array(7 - numeral.toString().length).join('0') + numeral;
+};
+
+function makeNameFilename(migrationName) {
+ return migrationName.replace(/\s+/g, '-');
+};
+
exports.up = function(migrationName) {
@@ ... @@
exports.create = function(migrationName) {
- var migrationTemplate = [
- '"use strict"',
- '',
- 'exports.up = function(mongoose, next) {',
- ' next();',
- '}',
- '',
- 'exports.down = function(mongoose, next) {',
- ' next();',
- '}',
- ''
- ].join('\n');
-
-
-
+ var migrationTemplate = [
+ '"use strict"',
+ '',
+ 'exports.up = function(mongoose, next) {',
+ ' next();',
+ '}',
+ '',
+ 'exports.down = function(mongoose, next) {',
+ ' next();',
+ '}',
+ ''
+ ].join('\n');
+
+ try {
+ fs.mkdirSync(absoluteMigrationDirectory, parseInt('0775', 8));
+ } catch (err) {
+ // ignore error creating directory
+ }
+
+ var existingMigrationOrdinals = fs.readdirSync(absoluteMigrationDirectory).map(function(filename) {
+ return parseInt(filename.match(/^(\d+)/)[1], 10);
+ }).sort(function(a, b) {
+ return a - b;
+ });
+
+ var nextOrdinal = (existingMigrationOrdinals.pop() || 0) + 1;
+ var fileName = padNumeral(nextOrdinal) + "-" + makeNameFilename(migrationName) + ".js";
+ var absoluteFileName = path.join(absoluteMigrationDirectory, fileName);
+ fs.writeFileSync(absoluteFileName, migrationTemplate, {mode: parseInt('0664', 8)});
+ console.log("New migration created: " + path.join(relativeMigrationDirectory, fileName));
};
-
+
|
--- a/app/lib/db/MigrationManager.js
+++ b/app/lib/db/MigrationManager.js
@@ -4,3 +4,15 @@
CON path = require('path');
DEL
ADD
ADD var baseDirectory = path.join(__dirname, '..', '..');
ADD var relativeMigrationDirectory = path.join('db', 'migrations');
ADD var absoluteMigrationDirectory = path.join(baseDirectory, relativeMigrationDirectory);
ADD
ADD function padNumeral(numeral) {
ADD return Array(7 - numeral.toString().length).join('0') + numeral;
ADD };
ADD
ADD function makeNameFilename(migrationName) {
ADD return migrationName.replace(/\s+/g, '-');
ADD };
ADD
CON exports.up = function(migrationName) {
@@ -12,20 +24,35 @@
CON exports.create = function(migrationName) {
DEL var migrationTemplate = [
DEL '"use strict"',
DEL '',
DEL 'exports.up = function(mongoose, next) {',
DEL ' next();',
DEL '}',
DEL '',
DEL 'exports.down = function(mongoose, next) {',
DEL ' next();',
DEL '}',
DEL ''
DEL ].join('\n');
DEL
DEL
DEL
ADD var migrationTemplate = [
ADD '"use strict"',
ADD '',
ADD 'exports.up = function(mongoose, next) {',
ADD ' next();',
ADD '}',
ADD '',
ADD 'exports.down = function(mongoose, next) {',
ADD ' next();',
ADD '}',
ADD ''
ADD ].join('\n');
ADD
ADD try {
ADD fs.mkdirSync(absoluteMigrationDirectory, parseInt('0775', 8));
ADD } catch (err) {
ADD // ignore error creating directory
ADD }
ADD
ADD var existingMigrationOrdinals = fs.readdirSync(absoluteMigrationDirectory).map(function(filename) {
ADD return parseInt(filename.match(/^(\d+)/)[1], 10);
ADD }).sort(function(a, b) {
ADD return a - b;
ADD });
ADD
ADD var nextOrdinal = (existingMigrationOrdinals.pop() || 0) + 1;
ADD var fileName = padNumeral(nextOrdinal) + "-" + makeNameFilename(migrationName) + ".js";
ADD var absoluteFileName = path.join(absoluteMigrationDirectory, fileName);
ADD fs.writeFileSync(absoluteFileName, migrationTemplate, {mode: parseInt('0664', 8)});
ADD console.log("New migration created: " + path.join(relativeMigrationDirectory, fileName));
CON };
CON
DEL
ADD
CON
|
<<<<<<< SEARCH
var fs = require('fs'),
path = require('path');
exports.up = function(migrationName) {
};
=======
var fs = require('fs'),
path = require('path');
var baseDirectory = path.join(__dirname, '..', '..');
var relativeMigrationDirectory = path.join('db', 'migrations');
var absoluteMigrationDirectory = path.join(baseDirectory, relativeMigrationDirectory);
function padNumeral(numeral) {
return Array(7 - numeral.toString().length).join('0') + numeral;
};
function makeNameFilename(migrationName) {
return migrationName.replace(/\s+/g, '-');
};
exports.up = function(migrationName) {
};
>>>>>>> REPLACE
<<<<<<< SEARCH
exports.create = function(migrationName) {
var migrationTemplate = [
'"use strict"',
'',
'exports.up = function(mongoose, next) {',
' next();',
'}',
'',
'exports.down = function(mongoose, next) {',
' next();',
'}',
''
].join('\n');
};
=======
exports.create = function(migrationName) {
var migrationTemplate = [
'"use strict"',
'',
'exports.up = function(mongoose, next) {',
' next();',
'}',
'',
'exports.down = function(mongoose, next) {',
' next();',
'}',
''
].join('\n');
try {
fs.mkdirSync(absoluteMigrationDirectory, parseInt('0775', 8));
} catch (err) {
// ignore error creating directory
}
var existingMigrationOrdinals = fs.readdirSync(absoluteMigrationDirectory).map(function(filename) {
return parseInt(filename.match(/^(\d+)/)[1], 10);
}).sort(function(a, b) {
return a - b;
});
var nextOrdinal = (existingMigrationOrdinals.pop() || 0) + 1;
var fileName = padNumeral(nextOrdinal) + "-" + makeNameFilename(migrationName) + ".js";
var absoluteFileName = path.join(absoluteMigrationDirectory, fileName);
fs.writeFileSync(absoluteFileName, migrationTemplate, {mode: parseInt('0664', 8)});
console.log("New migration created: " + path.join(relativeMigrationDirectory, fileName));
};
>>>>>>> REPLACE
|
cxpqwvtj/himawari
|
0bed60ce1edb898d9b9ccba12b656bfd39857df8
|
web/src/main/kotlin/app/himawari/filter/UserMDCInsertingServletFilter.kt
|
kotlin
|
mit
|
Change val variable to static.
|
package app.himawari.filter
import org.slf4j.MDC
import org.springframework.core.Ordered
import org.springframework.core.annotation.Order
import org.springframework.security.core.context.SecurityContextHolder
import org.springframework.security.core.userdetails.UserDetails
import org.springframework.stereotype.Component
import javax.servlet.*
/**
* Created by cxpqwvtj on 2018/07/29.
*/
@Component
@Order(Ordered.LOWEST_PRECEDENCE - 1)
class UserMDCInsertingServletFilter : Filter {
private val USER_KEY = "username"
override fun init(filterConfig: FilterConfig?) {
// NOP
}
override fun doFilter(request: ServletRequest?, response: ServletResponse?, chain: FilterChain) {
val authentication = SecurityContextHolder.getContext().authentication
val accessUser = if (authentication == null) {
"anonymous"
} else {
val principal = authentication.principal
if (principal is UserDetails) {
principal.username
} else {
principal.toString()
}
}
MDC.put(USER_KEY, accessUser)
try {
chain.doFilter(request, response)
} finally {
MDC.remove(USER_KEY)
}
}
override fun destroy() {
// NOP
}
}
|
package app.himawari.filter
import org.slf4j.MDC
import org.springframework.core.Ordered
import org.springframework.core.annotation.Order
import org.springframework.security.core.context.SecurityContextHolder
import org.springframework.security.core.userdetails.UserDetails
import org.springframework.stereotype.Component
import javax.servlet.*
/**
* Created by cxpqwvtj on 2018/07/29.
*/
@Component
@Order(Ordered.LOWEST_PRECEDENCE - 1)
class UserMDCInsertingServletFilter : Filter {
companion object {
private const val USER_KEY = "username"
}
override fun init(filterConfig: FilterConfig?) {
// NOP
}
override fun doFilter(request: ServletRequest?, response: ServletResponse?, chain: FilterChain) {
val authentication = SecurityContextHolder.getContext().authentication
val accessUser = if (authentication == null) {
"anonymous"
} else {
val principal = authentication.principal
if (principal is UserDetails) {
principal.username
} else {
principal.toString()
}
}
MDC.put(USER_KEY, accessUser)
try {
chain.doFilter(request, response)
} finally {
MDC.remove(USER_KEY)
}
}
override fun destroy() {
// NOP
}
}
| 4
| 1
| 1
|
mixed
|
--- a/web/src/main/kotlin/app/himawari/filter/UserMDCInsertingServletFilter.kt
+++ b/web/src/main/kotlin/app/himawari/filter/UserMDCInsertingServletFilter.kt
@@ -16,3 +16,6 @@
class UserMDCInsertingServletFilter : Filter {
- private val USER_KEY = "username"
+
+ companion object {
+ private const val USER_KEY = "username"
+ }
|
--- a/web/src/main/kotlin/app/himawari/filter/UserMDCInsertingServletFilter.kt
+++ b/web/src/main/kotlin/app/himawari/filter/UserMDCInsertingServletFilter.kt
@@ ... @@
class UserMDCInsertingServletFilter : Filter {
- private val USER_KEY = "username"
+
+ companion object {
+ private const val USER_KEY = "username"
+ }
|
--- a/web/src/main/kotlin/app/himawari/filter/UserMDCInsertingServletFilter.kt
+++ b/web/src/main/kotlin/app/himawari/filter/UserMDCInsertingServletFilter.kt
@@ -16,3 +16,6 @@
CON class UserMDCInsertingServletFilter : Filter {
DEL private val USER_KEY = "username"
ADD
ADD companion object {
ADD private const val USER_KEY = "username"
ADD }
CON
|
<<<<<<< SEARCH
@Order(Ordered.LOWEST_PRECEDENCE - 1)
class UserMDCInsertingServletFilter : Filter {
private val USER_KEY = "username"
override fun init(filterConfig: FilterConfig?) {
=======
@Order(Ordered.LOWEST_PRECEDENCE - 1)
class UserMDCInsertingServletFilter : Filter {
companion object {
private const val USER_KEY = "username"
}
override fun init(filterConfig: FilterConfig?) {
>>>>>>> REPLACE
|
Kurtz1993/ngts-cli
|
1e94670c02239e46afdb45f49f371201d4b34754
|
lib/ngts-module.js
|
javascript
|
mit
|
Add an option to create just the module file
|
#!/usr/bin/env node
const program = require("commander");
const _ = require("lodash");
const utils = require("./utils");
var dest = "";
program
.usage("<module-name> [options]")
.arguments("<module-name>")
.action(function (moduleName) {
cmdModuleName = moduleName;
})
.option("-a, --ctrl-alias <alias>", "Sets an alias for the controller. Defaults to vm.")
.parse(process.argv);
if (typeof cmdModuleName === "undefined") {
console.log("You must specify a name for the module.");
process.exit(1);
}
program.ctrlAlias = program.ctrlAlias || "vm";
cmdModuleName = utils.camelCase(cmdModuleName);
dest = utils.hyphenate(cmdModuleName.concat("/"));
var vals = {
appName: utils.camelCase(utils.getAppName()),
name: cmdModuleName,
pName: utils.pascalCase(cmdModuleName),
hName: utils.hyphenate(cmdModuleName),
ctrlAlias: program.ctrlAlias,
decoratorPath: utils.getDecoratorPath(dest),
tplPath: utils.getRelativePath()
};
var tpls = utils.readTemplates("module");
tpls = utils.compileTemplates(tpls, vals, cmdModuleName);
utils.writeFiles(tpls, dest);
|
#!/usr/bin/env node
const program = require("commander");
const _ = require("lodash");
const utils = require("./utils");
var dest = "";
program
.usage("<module-name> [options]")
.arguments("<module-name>")
.action(function (moduleName) {
cmdModuleName = moduleName;
})
.option("-a, --ctrl-alias <alias>", "Sets an alias for the controller. Defaults to vm.")
.option("-m, --module-only", "Creates only the module.ts file.")
.parse(process.argv);
if (typeof cmdModuleName === "undefined") {
console.log("You must specify a name for the module.");
process.exit(1);
}
program.ctrlAlias = program.ctrlAlias || "vm";
cmdModuleName = utils.camelCase(cmdModuleName);
dest = utils.hyphenate(cmdModuleName.concat("/"));
var vals = {
appName: utils.camelCase(utils.getAppName()),
name: cmdModuleName,
pName: utils.pascalCase(cmdModuleName),
hName: utils.hyphenate(cmdModuleName),
ctrlAlias: program.ctrlAlias,
decoratorPath: utils.getDecoratorPath(dest),
tplPath: utils.getRelativePath()
};
if (program.moduleOnly) {
var tpls = utils.readTemplate("module", "_name.ts");
} else {
var tpls = utils.readTemplates("module");
}
tpls = utils.compileTemplates(tpls, vals, cmdModuleName);
utils.writeFiles(tpls, dest);
| 6
| 1
| 2
|
mixed
|
--- a/lib/ngts-module.js
+++ b/lib/ngts-module.js
@@ -14,2 +14,3 @@
.option("-a, --ctrl-alias <alias>", "Sets an alias for the controller. Defaults to vm.")
+ .option("-m, --module-only", "Creates only the module.ts file.")
.parse(process.argv);
@@ -37,3 +38,7 @@
-var tpls = utils.readTemplates("module");
+if (program.moduleOnly) {
+ var tpls = utils.readTemplate("module", "_name.ts");
+} else {
+ var tpls = utils.readTemplates("module");
+}
|
--- a/lib/ngts-module.js
+++ b/lib/ngts-module.js
@@ ... @@
.option("-a, --ctrl-alias <alias>", "Sets an alias for the controller. Defaults to vm.")
+ .option("-m, --module-only", "Creates only the module.ts file.")
.parse(process.argv);
@@ ... @@
-var tpls = utils.readTemplates("module");
+if (program.moduleOnly) {
+ var tpls = utils.readTemplate("module", "_name.ts");
+} else {
+ var tpls = utils.readTemplates("module");
+}
|
--- a/lib/ngts-module.js
+++ b/lib/ngts-module.js
@@ -14,2 +14,3 @@
CON .option("-a, --ctrl-alias <alias>", "Sets an alias for the controller. Defaults to vm.")
ADD .option("-m, --module-only", "Creates only the module.ts file.")
CON .parse(process.argv);
@@ -37,3 +38,7 @@
CON
DEL var tpls = utils.readTemplates("module");
ADD if (program.moduleOnly) {
ADD var tpls = utils.readTemplate("module", "_name.ts");
ADD } else {
ADD var tpls = utils.readTemplates("module");
ADD }
CON
|
<<<<<<< SEARCH
})
.option("-a, --ctrl-alias <alias>", "Sets an alias for the controller. Defaults to vm.")
.parse(process.argv);
=======
})
.option("-a, --ctrl-alias <alias>", "Sets an alias for the controller. Defaults to vm.")
.option("-m, --module-only", "Creates only the module.ts file.")
.parse(process.argv);
>>>>>>> REPLACE
<<<<<<< SEARCH
};
var tpls = utils.readTemplates("module");
tpls = utils.compileTemplates(tpls, vals, cmdModuleName);
=======
};
if (program.moduleOnly) {
var tpls = utils.readTemplate("module", "_name.ts");
} else {
var tpls = utils.readTemplates("module");
}
tpls = utils.compileTemplates(tpls, vals, cmdModuleName);
>>>>>>> REPLACE
|
fanart-tv/fanarttv-discord-update-bot
|
e08e0072166bec2fa84c507dd6bcb111c1fbbf24
|
src/main/kotlin/tv/fanart/bot/FanartBot.kt
|
kotlin
|
mit
|
NOGH: Add translation config launch strategy
|
package tv.fanart.bot
import kotlinx.coroutines.*
import org.koin.core.KoinComponent
import org.koin.core.inject
import tv.fanart.config.ConfigRepo
import java.lang.Runnable
import java.util.*
class FanartBot : KoinComponent {
private val configurationClient by inject<ConfigRepo>()
private val mainJob = SupervisorJob()
private val mainContext = Dispatchers.Main + mainJob
suspend fun start() = coroutineScope {
configurationClient.updateConfig?.let { updateConfig ->
launch(mainContext) {
val updateBot = UpdateBot(updateConfig)
while (true) {
updateBot.update(Date(updateConfig.lastUpdate))?.let {
configurationClient.updateConfig(updateConfig.copy(lastUpdate = it.time))
}
delay(updateConfig.delay)
}
}
}
// TODO Spawn off translation bot
launch(mainContext) {
}
Runtime.getRuntime().addShutdownHook(Thread(Runnable {
runBlocking {
mainJob.cancelAndJoin()
}
}))
yield()
}
}
|
package tv.fanart.bot
import kotlinx.coroutines.*
import org.koin.core.KoinComponent
import org.koin.core.inject
import tv.fanart.config.ConfigRepo
import java.lang.Runnable
import java.util.*
class FanartBot : KoinComponent {
private val configurationClient by inject<ConfigRepo>()
private val mainJob = SupervisorJob()
private val mainContext = Dispatchers.Main + mainJob
suspend fun start() = coroutineScope {
configurationClient.updateConfig?.let { updateConfig ->
launch(mainContext) {
val updateBot = UpdateBot(updateConfig)
while (true) {
updateBot.update(Date(updateConfig.lastUpdate))?.let {
configurationClient.updateConfig(updateConfig.copy(lastUpdate = it.time))
}
delay(updateConfig.delay)
}
}
}
configurationClient.translationConfig?.let {
launch(mainContext) {
}
}
Runtime.getRuntime().addShutdownHook(Thread(Runnable {
runBlocking {
mainJob.cancelAndJoin()
}
}))
yield()
}
}
| 3
| 2
| 1
|
mixed
|
--- a/src/main/kotlin/tv/fanart/bot/FanartBot.kt
+++ b/src/main/kotlin/tv/fanart/bot/FanartBot.kt
@@ -30,4 +30,5 @@
- // TODO Spawn off translation bot
- launch(mainContext) {
+ configurationClient.translationConfig?.let {
+ launch(mainContext) {
+ }
}
|
--- a/src/main/kotlin/tv/fanart/bot/FanartBot.kt
+++ b/src/main/kotlin/tv/fanart/bot/FanartBot.kt
@@ ... @@
- // TODO Spawn off translation bot
- launch(mainContext) {
+ configurationClient.translationConfig?.let {
+ launch(mainContext) {
+ }
}
|
--- a/src/main/kotlin/tv/fanart/bot/FanartBot.kt
+++ b/src/main/kotlin/tv/fanart/bot/FanartBot.kt
@@ -30,4 +30,5 @@
CON
DEL // TODO Spawn off translation bot
DEL launch(mainContext) {
ADD configurationClient.translationConfig?.let {
ADD launch(mainContext) {
ADD }
CON }
|
<<<<<<< SEARCH
}
// TODO Spawn off translation bot
launch(mainContext) {
}
=======
}
configurationClient.translationConfig?.let {
launch(mainContext) {
}
}
>>>>>>> REPLACE
|
gbouvignies/chemex
|
2965891b46e89e0d7222ec16a2327f2bdef86f52
|
chemex/util.py
|
python
|
bsd-3-clause
|
Update settings for reading config files
Update the definition of comments, now only allowing the use of "#"
for comments. Add a converter function to parse list of floats,
such as:
list_of_floats = [1.0, 2.0, 3.0]
|
"""The util module contains a variety of utility functions."""
import configparser
import sys
def read_cfg_file(filename):
"""Read and parse the experiment configuration file with configparser."""
config = configparser.ConfigParser(inline_comment_prefixes=("#", ";"))
config.optionxform = str
try:
out = config.read(str(filename))
if not out and filename is not None:
exit(f"\nERROR: The file '{filename}' is empty or does not exist!\n")
except configparser.MissingSectionHeaderError:
exit(f"\nERROR: You are missing a section heading in {filename:s}\n")
except configparser.ParsingError:
exit(
"\nERROR: Having trouble reading your parameter file, did you"
" forget '=' signs?\n{:s}".format(sys.exc_info()[1])
)
return config
def normalize_path(working_dir, filename):
"""Normalize the path of a filename relative to a specific directory."""
path = filename
if not path.is_absolute():
path = working_dir / path
return path.resolve()
def header1(string):
"""Print a formatted heading."""
print(("\n".join(["", "", string, "=" * len(string), ""])))
def header2(string):
"""Print a formatted subheading."""
print(("\n".join(["", string, "-" * len(string), ""])))
|
"""The util module contains a variety of utility functions."""
import configparser
import sys
def listfloat(text):
return [float(val) for val in text.strip("[]").split(",")]
def read_cfg_file(filename=None):
"""Read and parse the experiment configuration file with configparser."""
config = configparser.ConfigParser(
comment_prefixes="#", inline_comment_prefixes="#", converters=listfloat
)
config.optionxform = str
try:
result = config.read(str(filename))
if not result and filename is not None:
exit(f"\nERROR: The file '{filename}' is empty or does not exist!\n")
except configparser.MissingSectionHeaderError:
exit(f"\nERROR: You are missing a section heading in {filename:s}\n")
except configparser.ParsingError:
exit(
"\nERROR: Having trouble reading your parameter file, did you"
" forget '=' signs?\n{:s}".format(sys.exc_info()[1])
)
return config
def normalize_path(working_dir, filename):
"""Normalize the path of a filename relative to a specific directory."""
path = filename
if not path.is_absolute():
path = working_dir / path
return path.resolve()
def header1(string):
"""Print a formatted heading."""
print(("\n".join(["", "", string, "=" * len(string), ""])))
def header2(string):
"""Print a formatted subheading."""
print(("\n".join(["", string, "-" * len(string), ""])))
| 10
| 4
| 2
|
mixed
|
--- a/chemex/util.py
+++ b/chemex/util.py
@@ -5,6 +5,12 @@
-def read_cfg_file(filename):
+def listfloat(text):
+ return [float(val) for val in text.strip("[]").split(",")]
+
+
+def read_cfg_file(filename=None):
"""Read and parse the experiment configuration file with configparser."""
- config = configparser.ConfigParser(inline_comment_prefixes=("#", ";"))
+ config = configparser.ConfigParser(
+ comment_prefixes="#", inline_comment_prefixes="#", converters=listfloat
+ )
config.optionxform = str
@@ -12,5 +18,5 @@
try:
- out = config.read(str(filename))
+ result = config.read(str(filename))
- if not out and filename is not None:
+ if not result and filename is not None:
exit(f"\nERROR: The file '{filename}' is empty or does not exist!\n")
|
--- a/chemex/util.py
+++ b/chemex/util.py
@@ ... @@
-def read_cfg_file(filename):
+def listfloat(text):
+ return [float(val) for val in text.strip("[]").split(",")]
+
+
+def read_cfg_file(filename=None):
"""Read and parse the experiment configuration file with configparser."""
- config = configparser.ConfigParser(inline_comment_prefixes=("#", ";"))
+ config = configparser.ConfigParser(
+ comment_prefixes="#", inline_comment_prefixes="#", converters=listfloat
+ )
config.optionxform = str
@@ ... @@
try:
- out = config.read(str(filename))
+ result = config.read(str(filename))
- if not out and filename is not None:
+ if not result and filename is not None:
exit(f"\nERROR: The file '{filename}' is empty or does not exist!\n")
|
--- a/chemex/util.py
+++ b/chemex/util.py
@@ -5,6 +5,12 @@
CON
DEL def read_cfg_file(filename):
ADD def listfloat(text):
ADD return [float(val) for val in text.strip("[]").split(",")]
ADD
ADD
ADD def read_cfg_file(filename=None):
CON """Read and parse the experiment configuration file with configparser."""
CON
DEL config = configparser.ConfigParser(inline_comment_prefixes=("#", ";"))
ADD config = configparser.ConfigParser(
ADD comment_prefixes="#", inline_comment_prefixes="#", converters=listfloat
ADD )
CON config.optionxform = str
@@ -12,5 +18,5 @@
CON try:
DEL out = config.read(str(filename))
ADD result = config.read(str(filename))
CON
DEL if not out and filename is not None:
ADD if not result and filename is not None:
CON exit(f"\nERROR: The file '{filename}' is empty or does not exist!\n")
|
<<<<<<< SEARCH
def read_cfg_file(filename):
"""Read and parse the experiment configuration file with configparser."""
config = configparser.ConfigParser(inline_comment_prefixes=("#", ";"))
config.optionxform = str
try:
out = config.read(str(filename))
if not out and filename is not None:
exit(f"\nERROR: The file '{filename}' is empty or does not exist!\n")
=======
def listfloat(text):
return [float(val) for val in text.strip("[]").split(",")]
def read_cfg_file(filename=None):
"""Read and parse the experiment configuration file with configparser."""
config = configparser.ConfigParser(
comment_prefixes="#", inline_comment_prefixes="#", converters=listfloat
)
config.optionxform = str
try:
result = config.read(str(filename))
if not result and filename is not None:
exit(f"\nERROR: The file '{filename}' is empty or does not exist!\n")
>>>>>>> REPLACE
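A brief illustration of the converter mechanism the chemex commit describes (a minimal sketch, separate from the record): the stdlib configparser accepts a converters argument documented as a mapping from converter name to callable, and each entry adds a matching get<name>() accessor on the parser and on every section. The record's new_code passes the bare listfloat function rather than a mapping, so the sketch below uses the documented mapping form; the section and option names are invented for the example.

import configparser

def listfloat(text):
    # "[1.0, 2.0, 3.0]" -> [1.0, 2.0, 3.0]
    return [float(val) for val in text.strip("[]").split(",")]

# "#" is the only comment prefix, matching the commit message; the
# converters mapping exposes getlistfloat() on each section proxy.
config = configparser.ConfigParser(
    comment_prefixes="#",
    inline_comment_prefixes="#",
    converters={"listfloat": listfloat},
)
config.read_string("[fit]\nlist_of_floats = [1.0, 2.0, 3.0]  # three values\n")
print(config["fit"].getlistfloat("list_of_floats"))  # [1.0, 2.0, 3.0]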
|
atengler/kqueen-ui
|
f9ca473abf7aea3cc146badf2d45ae715f635aac
|
kqueen_ui/server.py
|
python
|
mit
|
Use correct parameter for HOST and PORT
|
from .config import current_config
from flask import Flask
from flask import redirect
from flask import url_for
from flask.ext.babel import Babel
from kqueen_ui.blueprints.registration.views import registration
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache
import logging
import os
logger = logging.getLogger(__name__)
cache = SimpleCache()
def create_app(config_file=None):
app = Flask(__name__, static_folder='./asset/static')
app.register_blueprint(ui, url_prefix='/ui')
app.register_blueprint(registration, url_prefix='/registration')
# load configuration
config = current_config(config_file)
app.config.from_mapping(config.to_dict())
app.logger.setLevel(getattr(logging, app.config.get('LOG_LEVEL')))
app.logger.info('Loading configuration from {}'.format(config.source_file))
Babel(app)
return app
app = create_app()
@app.route('/')
def root():
return redirect(url_for('ui.index'), code=302)
def run():
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('KQUEEN_UI_HOST'),
port=int(app.config.get('KQUEEN_UI_PORT'))
)
|
from .config import current_config
from flask import Flask
from flask import redirect
from flask import url_for
from flask.ext.babel import Babel
from kqueen_ui.blueprints.registration.views import registration
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache
import logging
import os
logger = logging.getLogger(__name__)
cache = SimpleCache()
def create_app(config_file=None):
app = Flask(__name__, static_folder='./asset/static')
app.register_blueprint(ui, url_prefix='/ui')
app.register_blueprint(registration, url_prefix='/registration')
# load configuration
config = current_config(config_file)
app.config.from_mapping(config.to_dict())
app.logger.setLevel(getattr(logging, app.config.get('LOG_LEVEL')))
app.logger.info('Loading configuration from {}'.format(config.source_file))
Babel(app)
return app
app = create_app()
@app.route('/')
def root():
return redirect(url_for('ui.index'), code=302)
def run():
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('HOST'),
port=int(app.config.get('PORT'))
)
| 2
| 2
| 1
|
mixed
|
--- a/kqueen_ui/server.py
+++ b/kqueen_ui/server.py
@@ -45,4 +45,4 @@
app.run(
- host=app.config.get('KQUEEN_UI_HOST'),
- port=int(app.config.get('KQUEEN_UI_PORT'))
+ host=app.config.get('HOST'),
+ port=int(app.config.get('PORT'))
)
|
--- a/kqueen_ui/server.py
+++ b/kqueen_ui/server.py
@@ ... @@
app.run(
- host=app.config.get('KQUEEN_UI_HOST'),
- port=int(app.config.get('KQUEEN_UI_PORT'))
+ host=app.config.get('HOST'),
+ port=int(app.config.get('PORT'))
)
|
--- a/kqueen_ui/server.py
+++ b/kqueen_ui/server.py
@@ -45,4 +45,4 @@
CON app.run(
DEL host=app.config.get('KQUEEN_UI_HOST'),
DEL port=int(app.config.get('KQUEEN_UI_PORT'))
ADD host=app.config.get('HOST'),
ADD port=int(app.config.get('PORT'))
CON )
|
<<<<<<< SEARCH
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('KQUEEN_UI_HOST'),
port=int(app.config.get('KQUEEN_UI_PORT'))
)
=======
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('HOST'),
port=int(app.config.get('PORT'))
)
>>>>>>> REPLACE
|
EricCat/Node-File-Delete
|
e5da5488cd2300200ca94441602e638312224974
|
index.js
|
javascript
|
bsd-3-clause
|
Modify Async && Sync Delete
|
"use strict";
var fs = require('fs');
exports.deleteFileSync = function(filePath, timeInterval) {
function iterator(filePath, dirs) {
var stat = fs.stat(filePath);
if(stat.isDirectory()) {
dirs.unshift(filePath);//collection dirs
inner(filePath, dirs);
} else if(stat.isFile()){
fs.unlinkSync(filePath);//delete file
}
}
function inner(path, dirs){
var arr = fs.readdirSync(path);
for(var i = 0, el ; el = arr[i++];){
iterator(path+"/"+el,dirs);
}
}
return setInterval(function(path, cb){
cb = cb || function(){};
var dirs = [];
try{
iterator(path, dirs);
for(var i = 0, el ; el = dirs[i++];){
fs.rmdirSync(el);//delete all collection dirs
}
cb()
}catch(e){
e.code === "ENOENT" ? cb() : cb(e);
}
}, timeInterval*1000);
};
|
"use strict";
var fs = require('fs');
exports.deleteFileSync = function(filePath, timeInterval, callback) {
function iterator(filePath, dirs, callback) {
fs.stat(filePath, function(err, stats){
if(err){
if (err.message === 'No such file or directory') {
// Ignore file not found errors and return an empty result
callback(null, "");
} else {
// Pass other errors through as is
callback(err);
}
} else{
if(stats.isDirectory()) {
dirs.unshift(filePath);//collection dirs
inner(filePath, dirs);
} else if(stats.isFile()){
fs.unlinkSync(filePath);//delete file
}
}
})
}
function inner(path, dirs){
var arr = fs.readdirSync(path);
for(var i = 0, el ; el = arr[i++];){
iterator(path+"/"+el,dirs);
}
}
var ex = function(path, cb){
cb = cb || function(){};
var dirs = [];
try{
iterator(path, dirs, function(err, data){
if(err) cb(err);
for(var i = 0, el ; el = dirs[i++];){
fs.rmdirSync(el);//delete all collection dirs
}
cb(data);
});
}catch(e){
e.code === "ENOENT" ? cb() : cb(e);
}
};
return setInterval(ex(filePath, callback), timeInterval*1000);
};
| 32
| 16
| 4
|
mixed
|
--- a/index.js
+++ b/index.js
@@ -5,11 +5,22 @@
-exports.deleteFileSync = function(filePath, timeInterval) {
- function iterator(filePath, dirs) {
- var stat = fs.stat(filePath);
- if(stat.isDirectory()) {
- dirs.unshift(filePath);//collection dirs
- inner(filePath, dirs);
- } else if(stat.isFile()){
- fs.unlinkSync(filePath);//delete file
- }
+exports.deleteFileSync = function(filePath, timeInterval, callback) {
+ function iterator(filePath, dirs, callback) {
+ fs.stat(filePath, function(err, stats){
+ if(err){
+ if (err.message === 'No such file or directory') {
+ // Ignore file not found errors and return an empty result
+ callback(null, "");
+ } else {
+ // Pass other errors through as is
+ callback(err);
+ }
+ } else{
+ if(stats.isDirectory()) {
+ dirs.unshift(filePath);//collection dirs
+ inner(filePath, dirs);
+ } else if(stats.isFile()){
+ fs.unlinkSync(filePath);//delete file
+ }
+ }
+ })
}
@@ -22,3 +33,4 @@
}
- return setInterval(function(path, cb){
+
+ var ex = function(path, cb){
cb = cb || function(){};
@@ -27,7 +39,10 @@
try{
- iterator(path, dirs);
- for(var i = 0, el ; el = dirs[i++];){
- fs.rmdirSync(el);//delete all collection dirs
- }
- cb()
+ iterator(path, dirs, function(err, data){
+ if(err) cb(err);
+ for(var i = 0, el ; el = dirs[i++];){
+ fs.rmdirSync(el);//delete all collection dirs
+ }
+ cb(data);
+ });
+
}catch(e){
@@ -35,3 +50,4 @@
}
- }, timeInterval*1000);
+ };
+ return setInterval(ex(filePath, callback), timeInterval*1000);
};
|
--- a/index.js
+++ b/index.js
@@ ... @@
-exports.deleteFileSync = function(filePath, timeInterval) {
- function iterator(filePath, dirs) {
- var stat = fs.stat(filePath);
- if(stat.isDirectory()) {
- dirs.unshift(filePath);//collection dirs
- inner(filePath, dirs);
- } else if(stat.isFile()){
- fs.unlinkSync(filePath);//delete file
- }
+exports.deleteFileSync = function(filePath, timeInterval, callback) {
+ function iterator(filePath, dirs, callback) {
+ fs.stat(filePath, function(err, stats){
+ if(err){
+ if (err.message === 'No such file or directory') {
+ // Ignore file not found errors and return an empty result
+ callback(null, "");
+ } else {
+ // Pass other errors through as is
+ callback(err);
+ }
+ } else{
+ if(stats.isDirectory()) {
+ dirs.unshift(filePath);//collection dirs
+ inner(filePath, dirs);
+ } else if(stats.isFile()){
+ fs.unlinkSync(filePath);//delete file
+ }
+ }
+ })
}
@@ ... @@
}
- return setInterval(function(path, cb){
+
+ var ex = function(path, cb){
cb = cb || function(){};
@@ ... @@
try{
- iterator(path, dirs);
- for(var i = 0, el ; el = dirs[i++];){
- fs.rmdirSync(el);//delete all collection dirs
- }
- cb()
+ iterator(path, dirs, function(err, data){
+ if(err) cb(err);
+ for(var i = 0, el ; el = dirs[i++];){
+ fs.rmdirSync(el);//delete all collection dirs
+ }
+ cb(data);
+ });
+
}catch(e){
@@ ... @@
}
- }, timeInterval*1000);
+ };
+ return setInterval(ex(filePath, callback), timeInterval*1000);
};
|
--- a/index.js
+++ b/index.js
@@ -5,11 +5,22 @@
CON
DEL exports.deleteFileSync = function(filePath, timeInterval) {
DEL function iterator(filePath, dirs) {
DEL var stat = fs.stat(filePath);
DEL if(stat.isDirectory()) {
DEL dirs.unshift(filePath);//collection dirs
DEL inner(filePath, dirs);
DEL } else if(stat.isFile()){
DEL fs.unlinkSync(filePath);//delete file
DEL }
ADD exports.deleteFileSync = function(filePath, timeInterval, callback) {
ADD function iterator(filePath, dirs, callback) {
ADD fs.stat(filePath, function(err, stats){
ADD if(err){
ADD if (err.message === 'No such file or directory') {
ADD // Ignore file not found errors and return an empty result
ADD callback(null, "");
ADD } else {
ADD // Pass other errors through as is
ADD callback(err);
ADD }
ADD } else{
ADD if(stats.isDirectory()) {
ADD dirs.unshift(filePath);//collection dirs
ADD inner(filePath, dirs);
ADD } else if(stats.isFile()){
ADD fs.unlinkSync(filePath);//delete file
ADD }
ADD }
ADD })
CON }
@@ -22,3 +33,4 @@
CON }
DEL return setInterval(function(path, cb){
ADD
ADD var ex = function(path, cb){
CON cb = cb || function(){};
@@ -27,7 +39,10 @@
CON try{
DEL iterator(path, dirs);
DEL for(var i = 0, el ; el = dirs[i++];){
DEL fs.rmdirSync(el);//delete all collection dirs
DEL }
DEL cb()
ADD iterator(path, dirs, function(err, data){
ADD if(err) cb(err);
ADD for(var i = 0, el ; el = dirs[i++];){
ADD fs.rmdirSync(el);//delete all collection dirs
ADD }
ADD cb(data);
ADD });
ADD
CON }catch(e){
@@ -35,3 +50,4 @@
CON }
DEL }, timeInterval*1000);
ADD };
ADD return setInterval(ex(filePath, callback), timeInterval*1000);
CON };
|
<<<<<<< SEARCH
var fs = require('fs');
exports.deleteFileSync = function(filePath, timeInterval) {
function iterator(filePath, dirs) {
var stat = fs.stat(filePath);
if(stat.isDirectory()) {
dirs.unshift(filePath);//collection dirs
inner(filePath, dirs);
} else if(stat.isFile()){
fs.unlinkSync(filePath);//delete file
}
}
=======
var fs = require('fs');
exports.deleteFileSync = function(filePath, timeInterval, callback) {
function iterator(filePath, dirs, callback) {
fs.stat(filePath, function(err, stats){
if(err){
if (err.message === 'No such file or directory') {
// Ignore file not found errors and return an empty result
callback(null, "");
} else {
// Pass other errors through as is
callback(err);
}
} else{
if(stats.isDirectory()) {
dirs.unshift(filePath);//collection dirs
inner(filePath, dirs);
} else if(stats.isFile()){
fs.unlinkSync(filePath);//delete file
}
}
})
}
>>>>>>> REPLACE
<<<<<<< SEARCH
}
}
return setInterval(function(path, cb){
cb = cb || function(){};
var dirs = [];
try{
iterator(path, dirs);
for(var i = 0, el ; el = dirs[i++];){
fs.rmdirSync(el);//delete all collection dirs
}
cb()
}catch(e){
e.code === "ENOENT" ? cb() : cb(e);
}
}, timeInterval*1000);
};
=======
}
}
var ex = function(path, cb){
cb = cb || function(){};
var dirs = [];
try{
iterator(path, dirs, function(err, data){
if(err) cb(err);
for(var i = 0, el ; el = dirs[i++];){
fs.rmdirSync(el);//delete all collection dirs
}
cb(data);
});
}catch(e){
e.code === "ENOENT" ? cb() : cb(e);
}
};
return setInterval(ex(filePath, callback), timeInterval*1000);
};
>>>>>>> REPLACE
|
llwanghong/jslint4java
|
91a03a93ca7575e79e17f52618f5705c66d5f144
|
src/net/happygiraffe/jslint/Main.java
|
java
|
bsd-2-clause
|
Print out a file not found message instead of blowing up.
|
package net.happygiraffe.jslint;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.List;
/**
* A command line interface to {@link JSLint}.
*
* @author dom
* @version $Id$
*/
public class Main {
/**
* The main entry point. Try passing in "--help" for more details.
*
* @param args One or more JavaScript files.
* @throws IOException
*/
public static void main(String[] args) throws IOException {
JSLint lint = new JSLint();
lint.addOption(Option.EQEQEQ);
lint.addOption(Option.UNDEF);
lint.addOption(Option.WHITE);
for (String file : args) {
BufferedReader reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file)));
List<Issue> issues = lint.lint(file, reader);
for (Issue issue : issues) {
System.err.println(issue);
}
}
}
}
|
package net.happygiraffe.jslint;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.List;
/**
* A command line interface to {@link JSLint}.
*
* @author dom
* @version $Id$
*/
public class Main {
/**
* The main entry point. Try passing in "--help" for more details.
*
* @param args
* One or more JavaScript files.
* @throws IOException
*/
public static void main(String[] args) throws IOException {
JSLint lint = new JSLint();
lint.addOption(Option.EQEQEQ);
lint.addOption(Option.UNDEF);
lint.addOption(Option.WHITE);
for (String file : args) {
lintFile(lint, file);
}
}
private static void lintFile(JSLint lint, String file) throws IOException {
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file)));
List<Issue> issues = lint.lint(file, reader);
for (Issue issue : issues) {
System.err.println(issue);
}
} catch (FileNotFoundException e) {
System.err.println(file + ":no such file");
}
}
}
| 11
| 1
| 4
|
mixed
|
--- a/src/net/happygiraffe/jslint/Main.java
+++ b/src/net/happygiraffe/jslint/Main.java
@@ -4,2 +4,3 @@
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
@@ -19,3 +20,4 @@
*
- * @param args One or more JavaScript files.
+ * @param args
+ * One or more JavaScript files.
* @throws IOException
@@ -28,2 +30,8 @@
for (String file : args) {
+ lintFile(lint, file);
+ }
+ }
+
+ private static void lintFile(JSLint lint, String file) throws IOException {
+ try {
BufferedReader reader = new BufferedReader(new InputStreamReader(
@@ -34,2 +42,4 @@
}
+ } catch (FileNotFoundException e) {
+ System.err.println(file + ":no such file");
}
|
--- a/src/net/happygiraffe/jslint/Main.java
+++ b/src/net/happygiraffe/jslint/Main.java
@@ ... @@
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
@@ ... @@
*
- * @param args One or more JavaScript files.
+ * @param args
+ * One or more JavaScript files.
* @throws IOException
@@ ... @@
for (String file : args) {
+ lintFile(lint, file);
+ }
+ }
+
+ private static void lintFile(JSLint lint, String file) throws IOException {
+ try {
BufferedReader reader = new BufferedReader(new InputStreamReader(
@@ ... @@
}
+ } catch (FileNotFoundException e) {
+ System.err.println(file + ":no such file");
}
|
--- a/src/net/happygiraffe/jslint/Main.java
+++ b/src/net/happygiraffe/jslint/Main.java
@@ -4,2 +4,3 @@
CON import java.io.FileInputStream;
ADD import java.io.FileNotFoundException;
CON import java.io.IOException;
@@ -19,3 +20,4 @@
CON *
DEL * @param args One or more JavaScript files.
ADD * @param args
ADD * One or more JavaScript files.
CON * @throws IOException
@@ -28,2 +30,8 @@
CON for (String file : args) {
ADD lintFile(lint, file);
ADD }
ADD }
ADD
ADD private static void lintFile(JSLint lint, String file) throws IOException {
ADD try {
CON BufferedReader reader = new BufferedReader(new InputStreamReader(
@@ -34,2 +42,4 @@
CON }
ADD } catch (FileNotFoundException e) {
ADD System.err.println(file + ":no such file");
CON }
|
<<<<<<< SEARCH
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
=======
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
>>>>>>> REPLACE
<<<<<<< SEARCH
* The main entry point. Try passing in "--help" for more details.
*
* @param args One or more JavaScript files.
* @throws IOException
*/
=======
* The main entry point. Try passing in "--help" for more details.
*
* @param args
* One or more JavaScript files.
* @throws IOException
*/
>>>>>>> REPLACE
<<<<<<< SEARCH
lint.addOption(Option.WHITE);
for (String file : args) {
BufferedReader reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file)));
=======
lint.addOption(Option.WHITE);
for (String file : args) {
lintFile(lint, file);
}
}
private static void lintFile(JSLint lint, String file) throws IOException {
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file)));
>>>>>>> REPLACE
<<<<<<< SEARCH
System.err.println(issue);
}
}
}
=======
System.err.println(issue);
}
} catch (FileNotFoundException e) {
System.err.println(file + ":no such file");
}
}
>>>>>>> REPLACE
|
bink81/java-experiments
|
58515da9800f7b9815ad93d3f53ff9c6a23a221b
|
src/main/java/algorithms/trees/TreeHeightCalculator.java
|
java
|
mit
|
Document prerequisites for the algorithm
|
package algorithms.trees;
public class TreeHeightCalculator {
private final int parents[];
/**
* @param parents
* defines index of parent for each node. Value -1 determines a root node
*/
public TreeHeightCalculator(int parents[]) {
this.parents = parents;
}
public int computeHeight() {
TreeNode root = createTree();
return recursiveHeightCalculation(root);
}
private TreeNode createTree() {
TreeNode[] nodes = new TreeNode[parents.length];
for (int i = 0; i < parents.length; i++) {
nodes[i] = new TreeNode();
}
TreeNode root = null;
for (int i = 0; i < parents.length; i++) {
int parentId = parents[i];
if (parentId >= 0) {
nodes[i].setParent(nodes[parentId]);
nodes[parentId].getChildren().add(nodes[i]);
} else {
root = nodes[i];
}
}
return root;
}
private int recursiveHeightCalculation(TreeNode treeNode) {
int max = 0;
for (TreeNode child : treeNode.getChildren()) {
int newMax = recursiveHeightCalculation(child);
if (newMax > max) {
max = newMax;
}
}
return ++max;
}
}
|
package algorithms.trees;
public class TreeHeightCalculator {
private final int parents[];
/**
* @param parents
* defines index of parent for each node. Value -1 determines a root node
*/
public TreeHeightCalculator(int parents[]) {
this.parents = parents;
}
/**
* assumptions: there is exactly one root and the input represents a tree
*/
public int computeHeight() {
TreeNode root = createTree();
return recursiveHeightCalculation(root);
}
private TreeNode createTree() {
TreeNode[] nodes = new TreeNode[parents.length];
for (int i = 0; i < parents.length; i++) {
nodes[i] = new TreeNode();
}
TreeNode root = null;
for (int i = 0; i < parents.length; i++) {
int parentId = parents[i];
if (parentId >= 0) {
nodes[i].setParent(nodes[parentId]);
nodes[parentId].getChildren().add(nodes[i]);
} else {
root = nodes[i];
}
}
return root;
}
private int recursiveHeightCalculation(TreeNode treeNode) {
int max = 0;
for (TreeNode child : treeNode.getChildren()) {
int newMax = recursiveHeightCalculation(child);
if (newMax > max) {
max = newMax;
}
}
return ++max;
}
}
| 3
| 0
| 1
|
add_only
|
--- a/src/main/java/algorithms/trees/TreeHeightCalculator.java
+++ b/src/main/java/algorithms/trees/TreeHeightCalculator.java
@@ -13,2 +13,5 @@
+ /**
+ * assumptions: there is exactly one root and the input represents a tree
+ */
public int computeHeight() {
|
--- a/src/main/java/algorithms/trees/TreeHeightCalculator.java
+++ b/src/main/java/algorithms/trees/TreeHeightCalculator.java
@@ ... @@
+ /**
+ * assumptions: there is exactly one root and the input represents a tree
+ */
public int computeHeight() {
|
--- a/src/main/java/algorithms/trees/TreeHeightCalculator.java
+++ b/src/main/java/algorithms/trees/TreeHeightCalculator.java
@@ -13,2 +13,5 @@
CON
ADD /**
ADD * assumptions: there is exactly one root and the input represents a tree
ADD */
CON public int computeHeight() {
|
<<<<<<< SEARCH
}
public int computeHeight() {
TreeNode root = createTree();
=======
}
/**
* assumptions: there is exactly one root and the input represents a tree
*/
public int computeHeight() {
TreeNode root = createTree();
>>>>>>> REPLACE
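The record above documents the algorithm's assumptions (exactly one root, the input represents a tree) but leaves the parents-array convention implicit. A minimal Python sketch, independent of the Java code, showing the same convention and the expected result: a lone root has height 1, and the depth list memoises results so the walk stays linear.

def tree_height(parents):
    # parents[i] is the index of node i's parent; -1 marks the single root.
    depth = [0] * len(parents)

    def node_depth(i):
        if depth[i] == 0:
            depth[i] = 1 if parents[i] == -1 else 1 + node_depth(parents[i])
        return depth[i]

    return max((node_depth(i) for i in range(len(parents))), default=0)

print(tree_height([-1, 0, 0, 1]))  # 3: root -> child -> grandchild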
|
brejoc/django-intercoolerjs
|
3ca2203a977f6d25c780e7a6168a16c4f7dec732
|
setup.py
|
python
|
mit
|
Switch to proper markdown for long description
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
long_description = open('README.md').read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
| 3
| 5
| 2
|
mixed
|
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,4 @@
-try:
- import pypandoc
- long_description = pypandoc.convert('README.md', 'rst')
-except (ImportError, OSError):
- long_description = open('README.md').read()
+with open('README.md', encoding='utf-8') as f:
+ long_description = f.read()
@@ -22,2 +19,3 @@
long_description=long_description,
+ long_description_content_type='text/markdown',
author='Jochen Breuer',
|
--- a/setup.py
+++ b/setup.py
@@ ... @@
-try:
- import pypandoc
- long_description = pypandoc.convert('README.md', 'rst')
-except (ImportError, OSError):
- long_description = open('README.md').read()
+with open('README.md', encoding='utf-8') as f:
+ long_description = f.read()
@@ ... @@
long_description=long_description,
+ long_description_content_type='text/markdown',
author='Jochen Breuer',
|
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,4 @@
CON
DEL try:
DEL import pypandoc
DEL long_description = pypandoc.convert('README.md', 'rst')
DEL except (ImportError, OSError):
DEL long_description = open('README.md').read()
ADD with open('README.md', encoding='utf-8') as f:
ADD long_description = f.read()
CON
@@ -22,2 +19,3 @@
CON long_description=long_description,
ADD long_description_content_type='text/markdown',
CON author='Jochen Breuer',
|
<<<<<<< SEARCH
repo_path = os.path.abspath(os.path.dirname(__file__))
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
long_description = open('README.md').read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
=======
repo_path = os.path.abspath(os.path.dirname(__file__))
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
>>>>>>> REPLACE
<<<<<<< SEARCH
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
author='Jochen Breuer',
author_email='[email protected]',
=======
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Jochen Breuer',
author_email='[email protected]',
>>>>>>> REPLACE
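The packaging change above comes down to two lines that are easy to miss in the diff: read README.md as UTF-8 and declare its content type so PyPI renders Markdown. A minimal sketch with placeholder metadata (the package name and version are invented); reasonably recent setuptools and twine are assumed to understand long_description_content_type.

from setuptools import setup, find_packages

with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="example-package",                        # placeholder
    version="0.1.0",                               # placeholder
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=find_packages(),
)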
|
mg4tv/mg4tv-web
|
91f3e6cdbd82cc5ca7927fd1dd0380d7fc5efef4
|
src/components/GroupMembersView.js
|
javascript
|
mit
|
Update format for group members view
|
import _ from 'lodash'
import React from 'react'
import {connect} from 'react-redux'
import {compose, withHandlers, withProps, withState} from 'recompose'
import GroupMembersList from './GroupMembersList'
import {addMemberToGroup} from '../actions/groups'
const mapStateToProps = ({groups}) => ({
groups
})
const enhance = compose(
connect(mapStateToProps),
withProps(({match}) => ({
groupId: match.params.groupId
})),
withState('newMember', 'updateNewMember', ''),
withHandlers({
onNewMemberChange: props => event => {
props.updateNewMember(event.target.value)
},
onNewGroupMemberSubmit: props => event => {
event.preventDefault()
props.dispatch(addMemberToGroup({
groupId: props.groupId,
memberId: props.newMember,
}))
}
}),
)
const GroupMembersView = ({groups, groupId, onNewGroupMemberSubmit, onNewMemberChange, newMember}) => (
<div>
<GroupMembersList
groupId={groupId}
memberIds={
Object.keys(
_.get(groups, `${groupId}.members`, {})
)
}
/>
<form onSubmit={onNewGroupMemberSubmit}>
<textarea
placeholder='Add New Member'
value={newMember}
onChange={onNewMemberChange}
/>
<input
type='submit'
value='submit'
/>
</form>
</div>
)
export default enhance(GroupMembersView)
|
import _ from 'lodash'
import React from 'react'
import {connect} from 'react-redux'
import {compose, withHandlers, withProps, withState} from 'recompose'
import GroupMembersList from './GroupMembersList'
import {addMemberToGroup} from '../actions/groups'
const mapStateToProps = ({groups}) => ({
groups
})
const enhance = compose(
connect(mapStateToProps),
withProps(({match}) => ({
groupId: match.params.groupId
})),
withState('newGroupMember', 'updateNewGroupMember', ''),
withHandlers({
onNewGroupMemberChange: props => event => {
props.updateNewGroupMember(event.target.value)
},
onNewGroupMemberSubmit: props => event => {
event.preventDefault()
props.dispatch(addMemberToGroup({
groupId: props.groupId,
memberId: props.newGroupMember,
}))
}
}),
)
const GroupMembersView = ({
groups,
groupId,
onNewGroupMemberSubmit,
onNewGroupMemberChange,
newGroupMember,
}) => (
<div>
<GroupMembersList
groupId={groupId}
memberIds={
Object.keys(
_.get(groups, `${groupId}.members`, {})
)
}
/>
<form onSubmit={onNewGroupMemberSubmit}>
<textarea
placeholder='Add New Member'
value={newGroupMember}
onChange={onNewGroupMemberChange}
/>
<input
type='submit'
value='submit'
/>
</form>
</div>
)
export default enhance(GroupMembersView)
| 13
| 7
| 4
|
mixed
|
--- a/src/components/GroupMembersView.js
+++ b/src/components/GroupMembersView.js
@@ -17,6 +17,6 @@
})),
- withState('newMember', 'updateNewMember', ''),
+ withState('newGroupMember', 'updateNewGroupMember', ''),
withHandlers({
- onNewMemberChange: props => event => {
- props.updateNewMember(event.target.value)
+ onNewGroupMemberChange: props => event => {
+ props.updateNewGroupMember(event.target.value)
},
@@ -26,3 +26,3 @@
groupId: props.groupId,
- memberId: props.newMember,
+ memberId: props.newGroupMember,
}))
@@ -32,3 +32,9 @@
-const GroupMembersView = ({groups, groupId, onNewGroupMemberSubmit, onNewMemberChange, newMember}) => (
+const GroupMembersView = ({
+ groups,
+ groupId,
+ onNewGroupMemberSubmit,
+ onNewGroupMemberChange,
+ newGroupMember,
+ }) => (
<div>
@@ -45,4 +51,4 @@
placeholder='Add New Member'
- value={newMember}
- onChange={onNewMemberChange}
+ value={newGroupMember}
+ onChange={onNewGroupMemberChange}
/>
|
--- a/src/components/GroupMembersView.js
+++ b/src/components/GroupMembersView.js
@@ ... @@
})),
- withState('newMember', 'updateNewMember', ''),
+ withState('newGroupMember', 'updateNewGroupMember', ''),
withHandlers({
- onNewMemberChange: props => event => {
- props.updateNewMember(event.target.value)
+ onNewGroupMemberChange: props => event => {
+ props.updateNewGroupMember(event.target.value)
},
@@ ... @@
groupId: props.groupId,
- memberId: props.newMember,
+ memberId: props.newGroupMember,
}))
@@ ... @@
-const GroupMembersView = ({groups, groupId, onNewGroupMemberSubmit, onNewMemberChange, newMember}) => (
+const GroupMembersView = ({
+ groups,
+ groupId,
+ onNewGroupMemberSubmit,
+ onNewGroupMemberChange,
+ newGroupMember,
+ }) => (
<div>
@@ ... @@
placeholder='Add New Member'
- value={newMember}
- onChange={onNewMemberChange}
+ value={newGroupMember}
+ onChange={onNewGroupMemberChange}
/>
|
--- a/src/components/GroupMembersView.js
+++ b/src/components/GroupMembersView.js
@@ -17,6 +17,6 @@
CON })),
DEL withState('newMember', 'updateNewMember', ''),
ADD withState('newGroupMember', 'updateNewGroupMember', ''),
CON withHandlers({
DEL onNewMemberChange: props => event => {
DEL props.updateNewMember(event.target.value)
ADD onNewGroupMemberChange: props => event => {
ADD props.updateNewGroupMember(event.target.value)
CON },
@@ -26,3 +26,3 @@
CON groupId: props.groupId,
DEL memberId: props.newMember,
ADD memberId: props.newGroupMember,
CON }))
@@ -32,3 +32,9 @@
CON
DEL const GroupMembersView = ({groups, groupId, onNewGroupMemberSubmit, onNewMemberChange, newMember}) => (
ADD const GroupMembersView = ({
ADD groups,
ADD groupId,
ADD onNewGroupMemberSubmit,
ADD onNewGroupMemberChange,
ADD newGroupMember,
ADD }) => (
CON <div>
@@ -45,4 +51,4 @@
CON placeholder='Add New Member'
DEL value={newMember}
DEL onChange={onNewMemberChange}
ADD value={newGroupMember}
ADD onChange={onNewGroupMemberChange}
CON />
|
<<<<<<< SEARCH
groupId: match.params.groupId
})),
withState('newMember', 'updateNewMember', ''),
withHandlers({
onNewMemberChange: props => event => {
props.updateNewMember(event.target.value)
},
onNewGroupMemberSubmit: props => event => {
event.preventDefault()
props.dispatch(addMemberToGroup({
groupId: props.groupId,
memberId: props.newMember,
}))
}
}),
)
const GroupMembersView = ({groups, groupId, onNewGroupMemberSubmit, onNewMemberChange, newMember}) => (
<div>
<GroupMembersList
=======
groupId: match.params.groupId
})),
withState('newGroupMember', 'updateNewGroupMember', ''),
withHandlers({
onNewGroupMemberChange: props => event => {
props.updateNewGroupMember(event.target.value)
},
onNewGroupMemberSubmit: props => event => {
event.preventDefault()
props.dispatch(addMemberToGroup({
groupId: props.groupId,
memberId: props.newGroupMember,
}))
}
}),
)
const GroupMembersView = ({
groups,
groupId,
onNewGroupMemberSubmit,
onNewGroupMemberChange,
newGroupMember,
}) => (
<div>
<GroupMembersList
>>>>>>> REPLACE
<<<<<<< SEARCH
<textarea
placeholder='Add New Member'
value={newMember}
onChange={onNewMemberChange}
/>
<input
=======
<textarea
placeholder='Add New Member'
value={newGroupMember}
onChange={onNewGroupMemberChange}
/>
<input
>>>>>>> REPLACE
|
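The GroupMembersView record above leans on recompose's `withState`/`withHandlers` pair to keep a controlled form field out of a class component, with the destructured props spread over multiple lines. A minimal sketch of that pattern, assuming `react` and `recompose` are installed; the component and prop names here are illustrative, not taken from the repository:

```js
// Minimal sketch of the withState/withHandlers controlled-input pattern.
// Assumes react and recompose are installed; names are illustrative.
import React from 'react';
import {compose, withState, withHandlers} from 'recompose';

const enhance = compose(
  // Injects `draft` and `updateDraft` props backed by local state.
  withState('draft', 'updateDraft', ''),
  withHandlers({
    // Keeps the textarea controlled by writing its value into state.
    onDraftChange: props => event => {
      props.updateDraft(event.target.value);
    },
    // Submits the current draft and prevents the default page reload.
    onDraftSubmit: props => event => {
      event.preventDefault();
      console.log('submitting', props.draft);
    },
  }),
);

const DraftForm = ({
  draft,
  onDraftChange,
  onDraftSubmit,
}) => (
  <form onSubmit={onDraftSubmit}>
    <textarea value={draft} onChange={onDraftChange} />
    <input type='submit' value='submit' />
  </form>
);

export default enhance(DraftForm);
```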
BirkbeckCTP/janeway
|
1dc1be8c5f705ff97d6b83171327fa5d1c59a385
|
src/utils/management/commands/run_upgrade.py
|
python
|
agpl-3.0
|
Upgrade path is now not required; help text is output if no path is supplied.
|
from importlib import import_module
from django.core.management.base import BaseCommand
from django.utils import translation
class Command(BaseCommand):
"""
Upgrades Janeway
"""
help = "Upgrades an install from one version to another."
def add_arguments(self, parser):
"""Adds arguments to Django's management command-line parser.
:param parser: the parser to which the required arguments will be added
:return: None
"""
parser.add_argument('upgrade_module')
def handle(self, *args, **options):
translation.activate('en')
upgrade_module_name = options.get('upgrade_module')
upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
try:
upgrade_module = import_module(upgrade_module_path)
upgrade_module.execute()
except ImportError as e:
print('There was an error running the requested upgrade: ')
print(e)
|
import os
from importlib import import_module
from django.core.management.base import BaseCommand
from django.utils import translation
from django.conf import settings
def get_modules():
path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade')
root, dirs, files = next(os.walk(path))
return files
class Command(BaseCommand):
"""
Upgrades Janeway
"""
help = "Upgrades an install from one version to another."
def add_arguments(self, parser):
"""Adds arguments to Django's management command-line parser.
:param parser: the parser to which the required arguments will be added
:return: None
"""
parser.add_argument('--path', required=False)
def handle(self, *args, **options):
if not options.get('path'):
print('No upgrade selected. Available upgrade paths: ')
for file in get_modules():
module_name = file.split('.')[0]
print('- {module_name}'.format(module_name=module_name))
print('To run an upgrade use the following: `python3 manage.py run_upgrade --script 12_13`')
else:
translation.activate('en')
upgrade_module_name = options.get('path')
upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
try:
upgrade_module = import_module(upgrade_module_path)
upgrade_module.execute()
except ImportError as e:
print('There was an error running the requested upgrade: ')
print(e)
| 26
| 10
| 3
|
mixed
|
--- a/src/utils/management/commands/run_upgrade.py
+++ b/src/utils/management/commands/run_upgrade.py
@@ -1 +1,2 @@
+import os
from importlib import import_module
@@ -4,2 +5,9 @@
from django.utils import translation
+from django.conf import settings
+
+
+def get_modules():
+ path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade')
+ root, dirs, files = next(os.walk(path))
+ return files
@@ -19,15 +27,23 @@
"""
- parser.add_argument('upgrade_module')
+ parser.add_argument('--path', required=False)
def handle(self, *args, **options):
- translation.activate('en')
- upgrade_module_name = options.get('upgrade_module')
- upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
- try:
- upgrade_module = import_module(upgrade_module_path)
- upgrade_module.execute()
- except ImportError as e:
- print('There was an error running the requested upgrade: ')
- print(e)
+ if not options.get('path'):
+ print('No upgrade selected. Available upgrade paths: ')
+ for file in get_modules():
+ module_name = file.split('.')[0]
+ print('- {module_name}'.format(module_name=module_name))
+ print('To run an upgrade use the following: `python3 manage.py run_upgrade --script 12_13`')
+ else:
+ translation.activate('en')
+ upgrade_module_name = options.get('path')
+ upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
+ try:
+ upgrade_module = import_module(upgrade_module_path)
+ upgrade_module.execute()
+ except ImportError as e:
+ print('There was an error running the requested upgrade: ')
+ print(e)
+
|
--- a/src/utils/management/commands/run_upgrade.py
+++ b/src/utils/management/commands/run_upgrade.py
@@ ... @@
+import os
from importlib import import_module
@@ ... @@
from django.utils import translation
+from django.conf import settings
+
+
+def get_modules():
+ path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade')
+ root, dirs, files = next(os.walk(path))
+ return files
@@ ... @@
"""
- parser.add_argument('upgrade_module')
+ parser.add_argument('--path', required=False)
def handle(self, *args, **options):
- translation.activate('en')
- upgrade_module_name = options.get('upgrade_module')
- upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
- try:
- upgrade_module = import_module(upgrade_module_path)
- upgrade_module.execute()
- except ImportError as e:
- print('There was an error running the requested upgrade: ')
- print(e)
+ if not options.get('path'):
+ print('No upgrade selected. Available upgrade paths: ')
+ for file in get_modules():
+ module_name = file.split('.')[0]
+ print('- {module_name}'.format(module_name=module_name))
+ print('To run an upgrade use the following: `python3 manage.py run_upgrade --script 12_13`')
+ else:
+ translation.activate('en')
+ upgrade_module_name = options.get('path')
+ upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
+ try:
+ upgrade_module = import_module(upgrade_module_path)
+ upgrade_module.execute()
+ except ImportError as e:
+ print('There was an error running the requested upgrade: ')
+ print(e)
+
|
--- a/src/utils/management/commands/run_upgrade.py
+++ b/src/utils/management/commands/run_upgrade.py
@@ -1 +1,2 @@
ADD import os
CON from importlib import import_module
@@ -4,2 +5,9 @@
CON from django.utils import translation
ADD from django.conf import settings
ADD
ADD
ADD def get_modules():
ADD path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade')
ADD root, dirs, files = next(os.walk(path))
ADD return files
CON
@@ -19,15 +27,23 @@
CON """
DEL parser.add_argument('upgrade_module')
ADD parser.add_argument('--path', required=False)
CON
CON def handle(self, *args, **options):
DEL translation.activate('en')
DEL upgrade_module_name = options.get('upgrade_module')
DEL upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
CON
DEL try:
DEL upgrade_module = import_module(upgrade_module_path)
DEL upgrade_module.execute()
DEL except ImportError as e:
DEL print('There was an error running the requested upgrade: ')
DEL print(e)
ADD if not options.get('path'):
ADD print('No upgrade selected. Available upgrade paths: ')
ADD for file in get_modules():
ADD module_name = file.split('.')[0]
ADD print('- {module_name}'.format(module_name=module_name))
ADD print('To run an upgrade use the following: `python3 manage.py run_upgrade --script 12_13`')
ADD else:
ADD translation.activate('en')
ADD upgrade_module_name = options.get('path')
ADD upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
CON
ADD try:
ADD upgrade_module = import_module(upgrade_module_path)
ADD upgrade_module.execute()
ADD except ImportError as e:
ADD print('There was an error running the requested upgrade: ')
ADD print(e)
ADD
|
<<<<<<< SEARCH
from importlib import import_module
from django.core.management.base import BaseCommand
from django.utils import translation
=======
import os
from importlib import import_module
from django.core.management.base import BaseCommand
from django.utils import translation
from django.conf import settings
def get_modules():
path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade')
root, dirs, files = next(os.walk(path))
return files
>>>>>>> REPLACE
<<<<<<< SEARCH
:return: None
"""
parser.add_argument('upgrade_module')
def handle(self, *args, **options):
translation.activate('en')
upgrade_module_name = options.get('upgrade_module')
upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
try:
upgrade_module = import_module(upgrade_module_path)
upgrade_module.execute()
except ImportError as e:
print('There was an error running the requested upgrade: ')
print(e)
=======
:return: None
"""
parser.add_argument('--path', required=False)
def handle(self, *args, **options):
if not options.get('path'):
print('No upgrade selected. Available upgrade paths: ')
for file in get_modules():
module_name = file.split('.')[0]
print('- {module_name}'.format(module_name=module_name))
print('To run an upgrade use the following: `python3 manage.py run_upgrade --script 12_13`')
else:
translation.activate('en')
upgrade_module_name = options.get('path')
upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name)
try:
upgrade_module = import_module(upgrade_module_path)
upgrade_module.execute()
except ImportError as e:
print('There was an error running the requested upgrade: ')
print(e)
>>>>>>> REPLACE
|
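The run_upgrade record above turns a required positional argument into an optional `--path` flag and falls back to listing the available upgrade modules when none is given. An analogous sketch of that fallback pattern in plain Node.js (the real command is a Django management command in Python; the directory layout and file names below are assumptions):

```js
// Sketch only: lists available upgrade scripts when no --path is given,
// otherwise loads the named module and runs its execute() export.
const fs = require('fs');
const path = require('path');

const UPGRADE_DIR = path.join(__dirname, 'upgrade'); // assumed layout

function getModules() {
  // File names without extensions double as module names.
  return fs.readdirSync(UPGRADE_DIR).map(file => path.parse(file).name);
}

function main(argv) {
  const pathIndex = argv.indexOf('--path');
  const moduleName = pathIndex !== -1 ? argv[pathIndex + 1] : null;

  if (!moduleName) {
    console.log('No upgrade selected. Available upgrade paths:');
    getModules().forEach(name => console.log(`- ${name}`));
    console.log('To run an upgrade: `node run_upgrade.js --path 12_13`');
    return;
  }

  try {
    const upgrade = require(path.join(UPGRADE_DIR, moduleName));
    upgrade.execute();
  } catch (e) {
    console.log('There was an error running the requested upgrade:');
    console.log(e);
  }
}

main(process.argv.slice(2));
```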
mauricionr/passport-twitter
|
93d576981fbac6c0f781b9826f2650c381493a13
|
lib/passport-twitter/strategy.js
|
javascript
|
mit
|
Support for denied Twitter authentication attempts.
|
/**
* Module dependencies.
*/
var util = require('util')
, OAuthStrategy = require("passport-oauth").OAuthStrategy;
/**
* `Strategy` constructor.
*
* @api public
*/
function Strategy(options, validate) {
options = options || {};
options.requestTokenURL = options.requestTokenURL || 'https://twitter.com/oauth/request_token';
options.accessTokenURL = options.accessTokenURL || 'https://twitter.com/oauth/access_token';
options.userAuthorizationURL = options.userAuthorizationURL || 'https://twitter.com/oauth/authenticate';
OAuthStrategy.call(this, options, validate);
this.name = 'twitter';
}
/**
* Inherit from `OAuthStrategy`.
*/
util.inherits(Strategy, OAuthStrategy);
/**
* Expose `Strategy`.
*/
module.exports = Strategy;
|
/**
* Module dependencies.
*/
var util = require('util')
, OAuthStrategy = require("passport-oauth").OAuthStrategy;
/**
* `Strategy` constructor.
*
* @api public
*/
function Strategy(options, verify) {
options = options || {};
options.requestTokenURL = options.requestTokenURL || 'https://twitter.com/oauth/request_token';
options.accessTokenURL = options.accessTokenURL || 'https://twitter.com/oauth/access_token';
options.userAuthorizationURL = options.userAuthorizationURL || 'https://twitter.com/oauth/authenticate';
options.sessionKey = options.sessionKey || 'oauth:twitter';
OAuthStrategy.call(this, options, verify);
this.name = 'twitter';
}
/**
* Inherit from `OAuthStrategy`.
*/
util.inherits(Strategy, OAuthStrategy);
Strategy.prototype.authenticate = function(req) {
// When a user denies authorization on Twitter, they are presented with a link
// to return to the application in the following format (where xxx is the
// value of the request token):
//
// http://www.example.com/auth/twitter/callback?denied=xxx
//
// Following the link back to the application is interpreted as an
// authentication failure.
if (req.query && req.query.denied) {
return this.fail();
}
// Call the base class for standard OAuth authentication.
OAuthStrategy.prototype.authenticate.call(this, req);
}
/**
* Expose `Strategy`.
*/
module.exports = Strategy;
| 21
| 2
| 3
|
mixed
|
--- a/lib/passport-twitter/strategy.js
+++ b/lib/passport-twitter/strategy.js
@@ -12,3 +12,3 @@
*/
-function Strategy(options, validate) {
+function Strategy(options, verify) {
options = options || {};
@@ -17,4 +17,5 @@
options.userAuthorizationURL = options.userAuthorizationURL || 'https://twitter.com/oauth/authenticate';
+ options.sessionKey = options.sessionKey || 'oauth:twitter';
- OAuthStrategy.call(this, options, validate);
+ OAuthStrategy.call(this, options, verify);
this.name = 'twitter';
@@ -28,2 +29,20 @@
+Strategy.prototype.authenticate = function(req) {
+ // When a user denies authorization on Twitter, they are presented with a link
+ // to return to the application in the following format (where xxx is the
+ // value of the request token):
+ //
+ // http://www.example.com/auth/twitter/callback?denied=xxx
+ //
+ // Following the link back to the application is interpreted as an
+ // authentication failure.
+ if (req.query && req.query.denied) {
+ return this.fail();
+ }
+
+ // Call the base class for standard OAuth authentication.
+ OAuthStrategy.prototype.authenticate.call(this, req);
+}
+
+
/**
|
--- a/lib/passport-twitter/strategy.js
+++ b/lib/passport-twitter/strategy.js
@@ ... @@
*/
-function Strategy(options, validate) {
+function Strategy(options, verify) {
options = options || {};
@@ ... @@
options.userAuthorizationURL = options.userAuthorizationURL || 'https://twitter.com/oauth/authenticate';
+ options.sessionKey = options.sessionKey || 'oauth:twitter';
- OAuthStrategy.call(this, options, validate);
+ OAuthStrategy.call(this, options, verify);
this.name = 'twitter';
@@ ... @@
+Strategy.prototype.authenticate = function(req) {
+ // When a user denies authorization on Twitter, they are presented with a link
+ // to return to the application in the following format (where xxx is the
+ // value of the request token):
+ //
+ // http://www.example.com/auth/twitter/callback?denied=xxx
+ //
+ // Following the link back to the application is interpreted as an
+ // authentication failure.
+ if (req.query && req.query.denied) {
+ return this.fail();
+ }
+
+ // Call the base class for standard OAuth authentication.
+ OAuthStrategy.prototype.authenticate.call(this, req);
+}
+
+
/**
|
--- a/lib/passport-twitter/strategy.js
+++ b/lib/passport-twitter/strategy.js
@@ -12,3 +12,3 @@
CON */
DEL function Strategy(options, validate) {
ADD function Strategy(options, verify) {
CON options = options || {};
@@ -17,4 +17,5 @@
CON options.userAuthorizationURL = options.userAuthorizationURL || 'https://twitter.com/oauth/authenticate';
ADD options.sessionKey = options.sessionKey || 'oauth:twitter';
CON
DEL OAuthStrategy.call(this, options, validate);
ADD OAuthStrategy.call(this, options, verify);
CON this.name = 'twitter';
@@ -28,2 +29,20 @@
CON
ADD Strategy.prototype.authenticate = function(req) {
ADD // When a user denies authorization on Twitter, they are presented with a link
ADD // to return to the application in the following format (where xxx is the
ADD // value of the request token):
ADD //
ADD // http://www.example.com/auth/twitter/callback?denied=xxx
ADD //
ADD // Following the link back to the application is interpreted as an
ADD // authentication failure.
ADD if (req.query && req.query.denied) {
ADD return this.fail();
ADD }
ADD
ADD // Call the base class for standard OAuth authentication.
ADD OAuthStrategy.prototype.authenticate.call(this, req);
ADD }
ADD
ADD
CON /**
|
<<<<<<< SEARCH
* @api public
*/
function Strategy(options, validate) {
options = options || {};
options.requestTokenURL = options.requestTokenURL || 'https://twitter.com/oauth/request_token';
options.accessTokenURL = options.accessTokenURL || 'https://twitter.com/oauth/access_token';
options.userAuthorizationURL = options.userAuthorizationURL || 'https://twitter.com/oauth/authenticate';
OAuthStrategy.call(this, options, validate);
this.name = 'twitter';
}
=======
* @api public
*/
function Strategy(options, verify) {
options = options || {};
options.requestTokenURL = options.requestTokenURL || 'https://twitter.com/oauth/request_token';
options.accessTokenURL = options.accessTokenURL || 'https://twitter.com/oauth/access_token';
options.userAuthorizationURL = options.userAuthorizationURL || 'https://twitter.com/oauth/authenticate';
options.sessionKey = options.sessionKey || 'oauth:twitter';
OAuthStrategy.call(this, options, verify);
this.name = 'twitter';
}
>>>>>>> REPLACE
<<<<<<< SEARCH
/**
* Expose `Strategy`.
=======
Strategy.prototype.authenticate = function(req) {
// When a user denies authorization on Twitter, they are presented with a link
// to return to the application in the following format (where xxx is the
// value of the request token):
//
// http://www.example.com/auth/twitter/callback?denied=xxx
//
// Following the link back to the application is interpreted as an
// authentication failure.
if (req.query && req.query.denied) {
return this.fail();
}
// Call the base class for standard OAuth authentication.
OAuthStrategy.prototype.authenticate.call(this, req);
}
/**
* Expose `Strategy`.
>>>>>>> REPLACE
|
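The strategy record above maps Twitter's `?denied=xxx` callback onto a standard Passport authentication failure. A hedged sketch of how an application would typically consume that failure with Express and the classic passport-twitter API of that era; the route paths, credentials, and callback URL below are placeholders:

```js
// Sketch only: wiring the strategy so a denied attempt lands on failureRedirect.
const express = require('express');
const passport = require('passport');
const TwitterStrategy = require('passport-twitter').Strategy;

passport.use(new TwitterStrategy({
  consumerKey: 'TWITTER_CONSUMER_KEY',       // placeholder
  consumerSecret: 'TWITTER_CONSUMER_SECRET', // placeholder
  callbackURL: 'http://www.example.com/auth/twitter/callback',
}, (token, tokenSecret, profile, done) => {
  // Verify callback: look the user up or create them, then call done().
  done(null, {id: profile.id});
}));

const app = express();

app.get('/auth/twitter', passport.authenticate('twitter'));

app.get('/auth/twitter/callback',
  passport.authenticate('twitter', {
    session: false,
    successRedirect: '/',
    failureRedirect: '/login', // where a denied attempt ends up
  }));

app.listen(3000);
```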