file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
handler.js | const config = require('../../../server/config'),
Manager = require('./manager'),
manager = new Manager();
// Responsible for handling requests for sitemap files
module.exports = function handler(siteApp) {
const verifyResourceType = function verifyResourceType(req, res, next) {
if (!Object.prototype.hasOwnProperty.call(manager, req.params.resource)) |
next();
};
siteApp.get('/sitemap.xml', function sitemapXML(req, res) {
res.set({
'Cache-Control': 'public, max-age=' + config.get('caching:sitemap:maxAge'),
'Content-Type': 'text/xml'
});
res.send(manager.getIndexXml());
});
siteApp.get('/sitemap-:resource.xml', verifyResourceType, function sitemapResourceXML(req, res) {
var type = req.params.resource,
page = 1;
res.set({
'Cache-Control': 'public, max-age=' + config.get('caching:sitemap:maxAge'),
'Content-Type': 'text/xml'
});
res.send(manager.getSiteMapXml(type, page));
});
};
| {
return res.sendStatus(404);
} | conditional_block |
handler.js | const config = require('../../../server/config'),
Manager = require('./manager'),
manager = new Manager();
// Responsible for handling requests for sitemap files
module.exports = function handler(siteApp) {
const verifyResourceType = function verifyResourceType(req, res, next) {
if (!Object.prototype.hasOwnProperty.call(manager, req.params.resource)) {
return res.sendStatus(404); | next();
};
siteApp.get('/sitemap.xml', function sitemapXML(req, res) {
res.set({
'Cache-Control': 'public, max-age=' + config.get('caching:sitemap:maxAge'),
'Content-Type': 'text/xml'
});
res.send(manager.getIndexXml());
});
siteApp.get('/sitemap-:resource.xml', verifyResourceType, function sitemapResourceXML(req, res) {
var type = req.params.resource,
page = 1;
res.set({
'Cache-Control': 'public, max-age=' + config.get('caching:sitemap:maxAge'),
'Content-Type': 'text/xml'
});
res.send(manager.getSiteMapXml(type, page));
});
}; | }
| random_line_split |
conf.py | # -*- coding: utf-8 -*-
#
# Sphinx RTD theme demo documentation build configuration file, created by
# sphinx-quickstart on Sun Nov 3 11:56:36 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.append(os.path.abspath('.'))
sys.path.append(os.path.abspath('./test_py_module'))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Math
mathjax_path = "http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document. |
# General information about the project.
project = u'Sphinx RTD theme demo'
copyright = u'2013, Dave Snider'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1'
# The full version, including alpha/beta/rc tags.
release = '1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
# 'sticky_navigation' : True # Set to False to disable the sticky nav while scrolling.
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["../.."]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'SphinxRTDthemedemodoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'SphinxRTDthemedemo.tex', u'Sphinx RTD theme demo Documentation',
u'Dave Snider', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sphinxrtdthemedemo', u'Sphinx RTD theme demo Documentation',
[u'Dave Snider'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'SphinxRTDthemedemo', u'Sphinx RTD theme demo Documentation',
u'Dave Snider', 'SphinxRTDthemedemo', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote' | master_doc = 'index' | random_line_split |
std-uncopyable-atomics.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #8380
#[feature(globs)];
use std::unstable::atomics::*;
use std::ptr;
| let x = INIT_ATOMIC_BOOL;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_INT;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_UINT;
let x = *&x; //~ ERROR: cannot move out of dereference
let x: AtomicPtr<uint> = AtomicPtr::new(ptr::mut_null());
let x = *&x; //~ ERROR: cannot move out of dereference
let x: AtomicOption<uint> = AtomicOption::empty();
let x = *&x; //~ ERROR: cannot move out of dereference
} | fn main() {
let x = INIT_ATOMIC_FLAG;
let x = *&x; //~ ERROR: cannot move out of dereference | random_line_split |
std-uncopyable-atomics.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #8380
#[feature(globs)];
use std::unstable::atomics::*;
use std::ptr;
fn | () {
let x = INIT_ATOMIC_FLAG;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_BOOL;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_INT;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_UINT;
let x = *&x; //~ ERROR: cannot move out of dereference
let x: AtomicPtr<uint> = AtomicPtr::new(ptr::mut_null());
let x = *&x; //~ ERROR: cannot move out of dereference
let x: AtomicOption<uint> = AtomicOption::empty();
let x = *&x; //~ ERROR: cannot move out of dereference
}
| main | identifier_name |
std-uncopyable-atomics.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #8380
#[feature(globs)];
use std::unstable::atomics::*;
use std::ptr;
fn main() | {
let x = INIT_ATOMIC_FLAG;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_BOOL;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_INT;
let x = *&x; //~ ERROR: cannot move out of dereference
let x = INIT_ATOMIC_UINT;
let x = *&x; //~ ERROR: cannot move out of dereference
let x: AtomicPtr<uint> = AtomicPtr::new(ptr::mut_null());
let x = *&x; //~ ERROR: cannot move out of dereference
let x: AtomicOption<uint> = AtomicOption::empty();
let x = *&x; //~ ERROR: cannot move out of dereference
} | identifier_body |
|
tmp-tests.ts | import tmp = require('tmp');
tmp.file((err, path, fd, cleanupCallback) => {
if (err) throw err;
console.log("File: ", path);
console.log("Filedescriptor: ", fd);
cleanupCallback();
});
tmp.dir((err, path, cleanupCallback) => {
if (err) throw err;
console.log("Dir: ", path);
cleanupCallback();
});
| if (err) throw err;
console.log("Created temporary filename: ", path);
});
tmp.file({ mode: 644, prefix: 'prefix-', postfix: '.txt' }, (err, path, fd) => {
if (err) throw err;
console.log("File: ", path);
console.log("Filedescriptor: ", fd);
});
tmp.dir({ mode: 750, prefix: 'myTmpDir_' }, (err, path) => {
if (err) throw err;
console.log("Dir: ", path);
});
tmp.tmpName({ template: '/tmp/tmp-XXXXXX' }, (err, path) => {
if (err) throw err;
console.log("Created temporary filename: ", path);
});
tmp.setGracefulCleanup();
var tmpobj = tmp.fileSync();
console.log("File: ", tmpobj.name);
console.log("Filedescriptor: ", tmpobj.fd);
tmpobj.removeCallback();
tmpobj = tmp.dirSync();
console.log("Dir: ", tmpobj.name);
tmpobj.removeCallback();
var name = tmp.tmpNameSync();
console.log("Created temporary filename: ", name);
tmpobj = tmp.fileSync({ mode: 644, prefix: 'prefix-', postfix: '.txt' });
console.log("File: ", tmpobj.name);
console.log("Filedescriptor: ", tmpobj.fd);
tmpobj = tmp.dirSync({ mode: 750, prefix: 'myTmpDir_' });
console.log("Dir: ", tmpobj.name);
var tmpname = tmp.tmpNameSync({ template: '/tmp/tmp-XXXXXX' });
console.log("Created temporary filename: ", tmpname ); | tmp.tmpName((err, path) => { | random_line_split |
sms_send_verification_code.rs | extern crate open189;
fn main() {
let args: Vec<_> = std::env::args().collect();
if args.len() < 6 {
println!("usage: {} <app id> <secret> <access token> <phone> <code> <expire time>",
args[0]);
std::process::exit(1);
}
let app_id = &args[1];
let secret = &args[2];
let access_token = &args[3];
let phone = &args[4];
let code = &args[5];
let expire_time: Option<usize> = if args.len() < 7 {
None
} else | ;
let app = open189::Open189App::new(app_id, secret);
let sms_token = app.sms_get_token(access_token);
println!("sms token = {:?}", sms_token);
let sms_token = sms_token.unwrap();
let config = open189::SmsCodeConfig::prepared(phone, code, expire_time);
let result = app.sms_send_verification_code(access_token, &sms_token, config);
println!("send result = {:?}", result);
}
| {
Some(args[6].parse().unwrap())
} | conditional_block |
sms_send_verification_code.rs | extern crate open189;
fn main() {
let args: Vec<_> = std::env::args().collect();
if args.len() < 6 {
println!("usage: {} <app id> <secret> <access token> <phone> <code> <expire time>",
args[0]);
std::process::exit(1);
}
let app_id = &args[1];
let secret = &args[2];
let access_token = &args[3];
let phone = &args[4];
let code = &args[5];
let expire_time: Option<usize> = if args.len() < 7 {
None
} else {
Some(args[6].parse().unwrap())
};
let app = open189::Open189App::new(app_id, secret);
let sms_token = app.sms_get_token(access_token);
println!("sms token = {:?}", sms_token);
let sms_token = sms_token.unwrap();
| } | let config = open189::SmsCodeConfig::prepared(phone, code, expire_time);
let result = app.sms_send_verification_code(access_token, &sms_token, config);
println!("send result = {:?}", result); | random_line_split |
sms_send_verification_code.rs | extern crate open189;
fn main() | {
let args: Vec<_> = std::env::args().collect();
if args.len() < 6 {
println!("usage: {} <app id> <secret> <access token> <phone> <code> <expire time>",
args[0]);
std::process::exit(1);
}
let app_id = &args[1];
let secret = &args[2];
let access_token = &args[3];
let phone = &args[4];
let code = &args[5];
let expire_time: Option<usize> = if args.len() < 7 {
None
} else {
Some(args[6].parse().unwrap())
};
let app = open189::Open189App::new(app_id, secret);
let sms_token = app.sms_get_token(access_token);
println!("sms token = {:?}", sms_token);
let sms_token = sms_token.unwrap();
let config = open189::SmsCodeConfig::prepared(phone, code, expire_time);
let result = app.sms_send_verification_code(access_token, &sms_token, config);
println!("send result = {:?}", result);
} | identifier_body |
|
sms_send_verification_code.rs | extern crate open189;
fn | () {
let args: Vec<_> = std::env::args().collect();
if args.len() < 6 {
println!("usage: {} <app id> <secret> <access token> <phone> <code> <expire time>",
args[0]);
std::process::exit(1);
}
let app_id = &args[1];
let secret = &args[2];
let access_token = &args[3];
let phone = &args[4];
let code = &args[5];
let expire_time: Option<usize> = if args.len() < 7 {
None
} else {
Some(args[6].parse().unwrap())
};
let app = open189::Open189App::new(app_id, secret);
let sms_token = app.sms_get_token(access_token);
println!("sms token = {:?}", sms_token);
let sms_token = sms_token.unwrap();
let config = open189::SmsCodeConfig::prepared(phone, code, expire_time);
let result = app.sms_send_verification_code(access_token, &sms_token, config);
println!("send result = {:?}", result);
}
| main | identifier_name |
testqueue.js | // Copyright 2007 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Generic queue for writing unit tests.
*
*/
goog.provide('goog.testing.TestQueue');
/**
* Generic queue for writing unit tests
* @constructor
*/
goog.testing.TestQueue = function() {
/**
* Events that have accumulated
* @type {Array.<Object>}
* @private
*/
this.events_ = [];
};
/**
* Adds a new event onto the queue.
* @param {Object} event The event to queue.
*/
goog.testing.TestQueue.prototype.enqueue = function(event) {
this.events_.push(event);
};
/**
* Returns whether the queue is empty.
* @return {boolean} Whether the queue is empty.
*/
goog.testing.TestQueue.prototype.isEmpty = function() {
return this.events_.length == 0;
};
/**
* Gets the next event from the queue. Throws an exception if the queue is
* empty.
* @param {string=} opt_comment Comment if the queue is empty.
* @return {Object} The next event from the queue.
*/
goog.testing.TestQueue.prototype.dequeue = function(opt_comment) {
if (this.isEmpty()) |
return this.events_.shift();
};
| {
throw Error('Handler is empty: ' + opt_comment);
} | conditional_block |
testqueue.js | // Copyright 2007 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Generic queue for writing unit tests.
*
*/
goog.provide('goog.testing.TestQueue');
/**
* Generic queue for writing unit tests
* @constructor
*/
goog.testing.TestQueue = function() {
/**
* Events that have accumulated
* @type {Array.<Object>}
* @private
*/
this.events_ = []; | };
/**
* Adds a new event onto the queue.
* @param {Object} event The event to queue.
*/
goog.testing.TestQueue.prototype.enqueue = function(event) {
this.events_.push(event);
};
/**
* Returns whether the queue is empty.
* @return {boolean} Whether the queue is empty.
*/
goog.testing.TestQueue.prototype.isEmpty = function() {
return this.events_.length == 0;
};
/**
* Gets the next event from the queue. Throws an exception if the queue is
* empty.
* @param {string=} opt_comment Comment if the queue is empty.
* @return {Object} The next event from the queue.
*/
goog.testing.TestQueue.prototype.dequeue = function(opt_comment) {
if (this.isEmpty()) {
throw Error('Handler is empty: ' + opt_comment);
}
return this.events_.shift();
}; | random_line_split |
|
WavesAPI.spec.ts | import { expect } from './getChai';
import * as WavesAPI from '../dist/waves-api';
let requiredConfigValues;
let allConfigValues;
describe('WavesAPI', () => {
beforeEach(() => {
requiredConfigValues = {
networkByte: 1,
nodeAddress: '1',
matcherAddress: '1',
logLevel: 'warning',
timeDiff: 0 // TODO : add some cases in the future API tests
};
allConfigValues = {
...requiredConfigValues,
minimumSeedLength: 1,
requestOffset: 1, |
it('should throw when created without required fields in config', () => {
expect(() => WavesAPI.create({})).to.throw();
expect(() => WavesAPI.create({ networkByte: 1, nodeAddress: '1' })).to.throw();
expect(() => WavesAPI.create({ networkByte: 1, matcherAddress: '1' })).to.throw();
expect(() => WavesAPI.create({ nodeAddress: '1', matcherAddress: '1' })).to.throw();
});
it('should have all fields in config when all fields are passed', () => {
const Waves = WavesAPI.create(allConfigValues);
expect(Waves.config.get()).to.deep.equal(allConfigValues);
});
it('should have all fields in config when only required fields are passed', () => {
const Waves = WavesAPI.create(requiredConfigValues);
const config = Waves.config.get();
expect(Object.keys(config)).to.have.members(Object.keys(allConfigValues));
});
it('should create seed without errors', () => {
const Waves = WavesAPI.create(requiredConfigValues);
const seed = Waves.Seed.create();
expect(seed.phrase).to.be.a('string');
});
it('should only insert fallback basic values when stored config does not have them', () => {
const logLevel = 'none';
const Waves = WavesAPI.create({ ...requiredConfigValues, logLevel });
Waves.config.set({ assetFactory: () => {} });
const config = Waves.config.get();
expect(config.logLevel).to.equal(logLevel);
const Waves2 = WavesAPI.create(requiredConfigValues);
const config2 = Waves2.config.get();
expect(config2.logLevel).to.equal(Waves.constants.DEFAULT_BASIC_CONFIG.logLevel);
});
}); | requestLimit: 1
};
}); | random_line_split |
mk.js | /*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang( 'clipboard', 'mk', {
copy: 'Копирај (Copy)',
copyError: 'Опциите за безбедност на вашиот прелистувач не дозволуваат уредувачот автоматски да изврши копирање. Ве молиме употребете ја тастатурата. (Ctrl/Cmd+C)',
cut: 'Исечи (Cut)',
cutError: 'Опциите за безбедност на вашиот прелистувач не дозволуваат уредувачот автоматски да изврши сечење. Ве молиме употребете ја тастатурата. (Ctrl/Cmd+C)', | paste: 'Залепи (Paste)',
pasteNotification: 'Press %1 to paste. Your browser doesn‘t support pasting with the toolbar button or context menu option.', // MISSING
pasteArea: 'Простор за залепување',
pasteMsg: 'Paste your content inside the area below and press OK.' // MISSING
} ); | random_line_split |
|
pl-pl.py | # coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"Uaktualnij" jest dodatkowym wyra\xc5\xbceniem postaci "pole1=\'nowawarto\xc5\x9b\xc4\x87\'". Nie mo\xc5\xbcesz uaktualni\xc4\x87 lub usun\xc4\x85\xc4\x87 wynik\xc3\xb3w z JOIN:',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': 'Wierszy usuni\xc4\x99tych: %s',
'%s rows updated': 'Wierszy uaktualnionych: %s',
'Available databases and tables': 'Dost\xc4\x99pne bazy danych i tabele',
'Cannot be empty': 'Nie mo\xc5\xbce by\xc4\x87 puste',
'Change Password': 'Change Password',
'Check to delete': 'Zaznacz aby usun\xc4\x85\xc4\x87',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Current request': 'Aktualne \xc5\xbc\xc4\x85danie',
'Current response': 'Aktualna odpowied\xc5\xba', | 'Edit Profile': 'Edit Profile',
'Edit This App': 'Edit This App',
'Edit current record': 'Edytuj aktualny rekord',
'Hello World': 'Witaj \xc5\x9awiecie',
'Import/Export': 'Importuj/eksportuj',
'Index': 'Index',
'Internal State': 'Stan wewn\xc4\x99trzny',
'Invalid Query': 'B\xc5\x82\xc4\x99dne zapytanie',
'Layout': 'Layout',
'Login': 'Zaloguj',
'Logout': 'Logout',
'Lost Password': 'Przypomnij has\xc5\x82o',
'Main Menu': 'Main Menu',
'Menu Model': 'Menu Model',
'New Record': 'Nowy rekord',
'No databases in this application': 'Brak baz danych w tej aplikacji',
'Powered by': 'Powered by',
'Query:': 'Zapytanie:',
'Register': 'Zarejestruj',
'Rows in table': 'Wiersze w tabeli',
'Rows selected': 'Wybrane wiersze',
'Stylesheet': 'Stylesheet',
'Sure you want to delete this object?': 'Czy na pewno chcesz usun\xc4\x85\xc4\x87 ten obiekt?',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Zapytanie" jest warunkiem postaci "db.tabela1.pole1==\'warto\xc5\x9b\xc4\x87\'". Takie co\xc5\x9b jak "db.tabela1.pole1==db.tabela2.pole2" oznacza SQL JOIN.',
'Update:': 'Uaktualnij:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'U\xc5\xbcyj (...)&(...) jako AND, (...)|(...) jako OR oraz ~(...) jako NOT do tworzenia bardziej skomplikowanych zapyta\xc5\x84.',
'View': 'View',
'Welcome %s': 'Welcome %s',
'Welcome to web2py': 'Witaj w web2py',
'appadmin is disabled because insecure channel': 'appadmin is disabled because insecure channel',
'cache': 'cache',
'change password': 'change password',
'Online examples': 'Kliknij aby przej\xc5\x9b\xc4\x87 do interaktywnych przyk\xc5\x82ad\xc3\xb3w',
'Administrative interface': 'Kliknij aby przej\xc5\x9b\xc4\x87 do panelu administracyjnego',
'customize me!': 'dostosuj mnie!',
'data uploaded': 'dane wys\xc5\x82ane',
'database': 'baza danych',
'database %s select': 'wyb\xc3\xb3r z bazy danych %s',
'db': 'baza danych',
'design': 'projektuj',
'done!': 'zrobione!',
'edit profile': 'edit profile',
'export as csv file': 'eksportuj jako plik csv',
'insert new': 'wstaw nowy rekord tabeli',
'insert new %s': 'wstaw nowy rekord do tabeli %s',
'invalid request': 'B\xc5\x82\xc4\x99dne \xc5\xbc\xc4\x85danie',
'login': 'login',
'logout': 'logout',
'new record inserted': 'nowy rekord zosta\xc5\x82 wstawiony',
'next 100 rows': 'nast\xc4\x99pne 100 wierszy',
'or import from csv file': 'lub zaimportuj z pliku csv',
'previous 100 rows': 'poprzednie 100 wierszy',
'record': 'record',
'record does not exist': 'rekord nie istnieje',
'record id': 'id rekordu',
'register': 'register',
'selected': 'wybranych',
'state': 'stan',
'table': 'tabela',
'unable to parse csv file': 'nie mo\xc5\xbcna sparsowa\xc4\x87 pliku csv',
} | 'Current session': 'Aktualna sesja',
'DB Model': 'DB Model',
'Database': 'Database',
'Delete:': 'Usu\xc5\x84:',
'Edit': 'Edit', | random_line_split |
MultipartParser.spec.ts | /* tslint:disable:no-unused-expression */
import * as Chai from "chai";
import { SinonSpy, spy } from "sinon";
import App from "./../../../src/lib/App";
import MultipartParser from "./../../../src/lib/http/bodyParsers/MultipartParser";
import HttpRequest from "./../../../src/lib/http/HttpRequest";
import HttpUploadedFile from "./../../../src/lib/http/HttpUploadedFile";
import IHttpHandler from "./../../../src/lib/types/http/IHttpHandler";
import IHttpRequest from "./../../../src/lib/types/http/IHttpRequest";
import IHttpResponse from "./../../../src/lib/types/http/IHttpResponse";
import IHttpUploadedFile from "./../../../src/lib/types/http/IHttpUploadedFile";
import IApp from "./../../../src/lib/types/IApp";
const mainEvent: any = {
body: "------WebKitFormBoundaryvef1fLxmoUdYZWXp\n"
+ "Content-Disposition: form-data; name=\"text\"\n"
+ "\n"
+ "text default\n"
+ "------WebKitFormBoundaryvef1fLxmoUdYZWXp\n"
+ "Content-Disposition: form-data; name=\"file\"; filename=\"A.txt\"\n"
+ "Content-Type: text/plain\n"
+ "Content-Length: 17\n"
+ "Other-Header: Other\n"
+ "\n"
+ "file text default\n"
+ "------WebKitFormBoundaryvef1fLxmoUdYZWXp\n"
+ "Content-Disposition: form-data; name=\"file2\"; filename=\"B.txt\"\n"
+ "\n"
+ "file text default 2\n"
+ "------WebKitFormBoundaryvef1fLxmoUdYZWXp--",
headers: {
"Content-Type": "multipart/form-data; boundary=----WebKitFormBoundaryvef1fLxmoUdYZWXp"
},
httpMethod: "POST",
isBase64Encoded: true,
path: "/blog",
resource: "API"
};
/**
* Test for MultipartParser.
*/
describe("MultipartParser", () => {
const app: IApp = new App();
const res: IHttpResponse = {} as IHttpResponse;
let next: SinonSpy;
let event: any;
const handler: IHttpHandler = (new MultipartParser()).create(); | beforeEach(() => {
event = Object.assign({}, mainEvent);
event.headers = Object.assign({}, mainEvent.headers);
next = spy();
});
it("should call 'next' WITHOUT an error if the body can not be parsed and header contentType is undefined.", () => {
event.body = "errorBody";
event.headers["Content-Type"] = undefined;
const req: IHttpRequest = new HttpRequest(app, event);
handler(req, res, next);
Chai.expect(next.called).to.be.true;
Chai.expect(next.args[0][0]).to.be.undefined;
});
it("should set the body with the parsed body as an object if header contentType is 'multipart/form-data'.", () => {
const req: IHttpRequest = new HttpRequest(app, event);
handler(req, res, next);
const expectedFiles: IHttpUploadedFile[] = [];
expectedFiles.push(new HttpUploadedFile("text/plain", 17, "A.txt", "file text default", {
"content-disposition": "form-data",
"content-type": "text/plain",
"content-length": "17",
"other-header": "Other"
}));
expectedFiles.push(new HttpUploadedFile(null, null, "B.txt", "file text default 2", {
"content-disposition": "form-data"
}));
Chai.expect(next.called).to.be.true;
Chai.expect(req.body).to.be.deep.equal({text: "text default"});
Chai.expect(req.files).to.be.deep.equal(expectedFiles);
});
it("should NOT set the body if header contentType is 'text/html'.", () => {
event.headers["Content-Type"] = "text/html";
const req: IHttpRequest = new HttpRequest(app, event);
handler(req, res, next);
Chai.expect(next.called).to.be.true;
Chai.expect(req.body).to.be.equal(event.body);
});
}); | random_line_split |
|
F3D_syn.py | #!/usr/bin/env python
import numpy as np
import os,sys
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
import argparse
ap=argparse.ArgumentParser()
ap.add_argument('-vis') # 1 plot cropped point cloud
ap.add_argument('-refine') # 1 refine mesh
ap.add_argument('-clean') # 1 remove tmp files
if ap.parse_args().vis==None:
vis=0
else:
vis=int(ap.parse_args().vis)
if ap.parse_args().refine==None:
refine=0
else:
refine=int(ap.parse_args().refine)
if ap.parse_args().clean==None:
clean=0
else:
clean=int(ap.parse_args().clean)
# Synthetic fault pixels
z=np.linspace(.2, -.8, num=100)
y=np.linspace(-.625,.625, num=120)
grid=np.meshgrid(y,z)
x=np.zeros((len(z)*len(y),1),dtype=np.float)
dat_vert=np.hstack((x,grid[0].reshape(x.shape),grid[1].reshape(x.shape)))
| #omg=(np.random.rand(wl.shape[0])-.5)*np.pi
L=dat_vert[1,:].max()-dat_vert[1,:].min()
zmax=z.max(); zmin=z.min()
for i in range(len(wl)):
phs=dat_vert[:,1]/wl[i]*np.pi+omg[i]
dat_vert[:,0]=dat_vert[:,0]+amp[i]*np.cos(phs)*(e*zmax-dat_vert[:,2])/(e*zmax-zmin)*np.exp(r*abs(phs)/np.pi)
dat_vert[:,0]=dat_vert[:,0]+(zcnt-dat_vert[:,2])*np.tan((90.-dip)/180.*np.pi)
# ridge patch
def flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup):
b1=-slope1*trunc1-.7
b2=-slope2*trunc2-.7
in_id=np.where(np.logical_and(dat_vert[:,2]-slope1*dat_vert[:,1]<b1, dat_vert[:,2]-slope2*dat_vert[:,1]<b2))[0]
out_id=np.setdiff1d(np.array(range(len(dat_vert)),dtype=np.int32),in_id)
x_shift=dat_vert[in_id,0]
# ridge patch
k=0
zup=dat_vert[:,2].max()
zlw=dat_vert[:,2].min()
for i in in_id:
r=abs(dat_vert[i,1]-.5*(trunc1+trunc2))
R=.5*((dat_vert[i,2]-b2)/slope2-(dat_vert[i,2]-b1)/slope1)
h=hlw+(dat_vert[i,2]-zlw)/(zup-zlw)*(hup-hlw)
x_shift[k]=x_shift[k]+np.cos(r/R*np.pi/2.)*h
k+=1
dat_vert=np.vstack((dat_vert[out_id,:],
np.hstack((x_shift.reshape(len(in_id),1),
dat_vert[in_id,1].reshape(len(in_id),1),
dat_vert[in_id,2].reshape(len(in_id),1)))))
return dat_vert
slope1=10.;slope2=-10.
trunc1=.1;trunc2=.6
hup=0.;hlw=.08
#dat_vert=flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup)
print omg
fout='F3D_syn.xyz'
f=open(fout,'w+')
np.savetxt(f,dat_vert,delimiter=' ', fmt='%.6f '*3)
f.close()
from subprocess import call
fin=fout
fout=fout.rsplit('.')[0]+'.stl'
mxl='xyz2stl.mlx'
call(['meshlabserver', '-i',fin,'-o',fout,'-s',mxl])
if clean==1: os.remove(fin)
# Mesh
fin=fout
if refine==1:
fout=fout.rsplit('.')[0]+'_dns.exo'
else:
fout=fout.rsplit('.')[0]+'.exo'
jou='F3D_tet.jou'
txt_jou=open(jou,'r')
txt_jou_tmp=open('tmp.jou','w+')
hf=0.0025 # fault grid length (0.0025 for ~100 m tet model, 0.003 for ~40 m)
hm=0.0075 # matrix grid length (0.0075 for ~100 m tet model, 0.010 for ~40 m)
for line in txt_jou:
line=line.strip('\r\n')
if 'import' in line.lower():
line='import stl "'+fin+'"'
if 'export' in line.lower():
line='export mesh "'+fout+'" dimension 3 overwrite'
if 'surface 46 94 95 97 size' in line.lower():
line='surface 46 94 95 97 size %0.6f' %(2*hf)
if 'volume all size' in line.lower():
line='volume all size %0.6f' %(2*hm)
txt_jou_tmp.write(line+'\n')
if 'mesh volume all' in line.lower() and refine==1:
txt_jou_tmp.write('refine volume all\n')
txt_jou.close();txt_jou_tmp.close()
call(['trelis','-nojournal','-nographics','tmp.jou'])
if clean==1: os.remove('tmp.jou')
# Preprocessing msh=>inp
dt_dyn=2E-5 #1E-5 for dns 100 m tet model, 8E-5 for 40 m tet, 8E-4 for ~1 m tet
import F3D_msh2inp
_=F3D_msh2inp.msh2inp(fout,dt_dyn)
# Fault plot
if vis==1:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(dat_vert[:,0], dat_vert[:,1], dat_vert[:,2], c='b', marker='.')
# Create cubic bounding box to simulate equal aspect ratio
max_range = np.array([np.max(dat_vert[:,0])-np.min(dat_vert[:,0]),np.max(dat_vert[:,1])\
-np.min(dat_vert[:,1]), np.max(dat_vert[:,2])-np.min(dat_vert[:,2])]).max()
Xb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][0].flatten()
Yb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][1].flatten()
Zb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][2].flatten()
for xb, yb, zb in zip(Xb, Yb, Zb):
ax.plot([xb], [yb], [zb], 'w',)
plt.title('fault [km]')
plt.grid()
plt.show() | # weak
wl=np.linspace(.12,.18,num=8); amp=.03125*np.sqrt(wl)
e=1.025; r=-.2
dip=70.; zcnt=-.35
omg=[ 0.82976173, 0.89624834, 0.03829284, -0.50016345, -1.06606012, 1.40505898, -1.24256034, 1.28623393] | random_line_split |
F3D_syn.py | #!/usr/bin/env python
import numpy as np
import os,sys
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
import argparse
ap=argparse.ArgumentParser()
ap.add_argument('-vis') # 1 plot cropped point cloud
ap.add_argument('-refine') # 1 refine mesh
ap.add_argument('-clean') # 1 remove tmp files
if ap.parse_args().vis==None:
vis=0
else:
vis=int(ap.parse_args().vis)
if ap.parse_args().refine==None:
refine=0
else:
refine=int(ap.parse_args().refine)
if ap.parse_args().clean==None:
clean=0
else:
clean=int(ap.parse_args().clean)
# Synthetic fault pixels
z=np.linspace(.2, -.8, num=100)
y=np.linspace(-.625,.625, num=120)
grid=np.meshgrid(y,z)
x=np.zeros((len(z)*len(y),1),dtype=np.float)
dat_vert=np.hstack((x,grid[0].reshape(x.shape),grid[1].reshape(x.shape)))
# weak
wl=np.linspace(.12,.18,num=8); amp=.03125*np.sqrt(wl)
e=1.025; r=-.2
dip=70.; zcnt=-.35
omg=[ 0.82976173, 0.89624834, 0.03829284, -0.50016345, -1.06606012, 1.40505898, -1.24256034, 1.28623393]
#omg=(np.random.rand(wl.shape[0])-.5)*np.pi
L=dat_vert[1,:].max()-dat_vert[1,:].min()
zmax=z.max(); zmin=z.min()
for i in range(len(wl)):
phs=dat_vert[:,1]/wl[i]*np.pi+omg[i]
dat_vert[:,0]=dat_vert[:,0]+amp[i]*np.cos(phs)*(e*zmax-dat_vert[:,2])/(e*zmax-zmin)*np.exp(r*abs(phs)/np.pi)
dat_vert[:,0]=dat_vert[:,0]+(zcnt-dat_vert[:,2])*np.tan((90.-dip)/180.*np.pi)
# ridge patch
def flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup):
|
slope1=10.;slope2=-10.
trunc1=.1;trunc2=.6
hup=0.;hlw=.08
#dat_vert=flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup)
print omg
fout='F3D_syn.xyz'
f=open(fout,'w+')
np.savetxt(f,dat_vert,delimiter=' ', fmt='%.6f '*3)
f.close()
from subprocess import call
fin=fout
fout=fout.rsplit('.')[0]+'.stl'
mxl='xyz2stl.mlx'
call(['meshlabserver', '-i',fin,'-o',fout,'-s',mxl])
if clean==1: os.remove(fin)
# Mesh
fin=fout
if refine==1:
fout=fout.rsplit('.')[0]+'_dns.exo'
else:
fout=fout.rsplit('.')[0]+'.exo'
jou='F3D_tet.jou'
txt_jou=open(jou,'r')
txt_jou_tmp=open('tmp.jou','w+')
hf=0.0025 # fault grid length (0.0025 for ~100 m tet model, 0.003 for ~40 m)
hm=0.0075 # matrix grid length (0.0075 for ~100 m tet model, 0.010 for ~40 m)
for line in txt_jou:
line=line.strip('\r\n')
if 'import' in line.lower():
line='import stl "'+fin+'"'
if 'export' in line.lower():
line='export mesh "'+fout+'" dimension 3 overwrite'
if 'surface 46 94 95 97 size' in line.lower():
line='surface 46 94 95 97 size %0.6f' %(2*hf)
if 'volume all size' in line.lower():
line='volume all size %0.6f' %(2*hm)
txt_jou_tmp.write(line+'\n')
if 'mesh volume all' in line.lower() and refine==1:
txt_jou_tmp.write('refine volume all\n')
txt_jou.close();txt_jou_tmp.close()
call(['trelis','-nojournal','-nographics','tmp.jou'])
if clean==1: os.remove('tmp.jou')
# Preprocessing msh=>inp
dt_dyn=2E-5 #1E-5 for dns 100 m tet model, 8E-5 for 40 m tet, 8E-4 for ~1 m tet
import F3D_msh2inp
_=F3D_msh2inp.msh2inp(fout,dt_dyn)
# Fault plot
if vis==1:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(dat_vert[:,0], dat_vert[:,1], dat_vert[:,2], c='b', marker='.')
# Create cubic bounding box to simulate equal aspect ratio
max_range = np.array([np.max(dat_vert[:,0])-np.min(dat_vert[:,0]),np.max(dat_vert[:,1])\
-np.min(dat_vert[:,1]), np.max(dat_vert[:,2])-np.min(dat_vert[:,2])]).max()
Xb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][0].flatten()
Yb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][1].flatten()
Zb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][2].flatten()
for xb, yb, zb in zip(Xb, Yb, Zb):
ax.plot([xb], [yb], [zb], 'w',)
plt.title('fault [km]')
plt.grid()
plt.show()
| b1=-slope1*trunc1-.7
b2=-slope2*trunc2-.7
in_id=np.where(np.logical_and(dat_vert[:,2]-slope1*dat_vert[:,1]<b1, dat_vert[:,2]-slope2*dat_vert[:,1]<b2))[0]
out_id=np.setdiff1d(np.array(range(len(dat_vert)),dtype=np.int32),in_id)
x_shift=dat_vert[in_id,0]
# ridge patch
k=0
zup=dat_vert[:,2].max()
zlw=dat_vert[:,2].min()
for i in in_id:
r=abs(dat_vert[i,1]-.5*(trunc1+trunc2))
R=.5*((dat_vert[i,2]-b2)/slope2-(dat_vert[i,2]-b1)/slope1)
h=hlw+(dat_vert[i,2]-zlw)/(zup-zlw)*(hup-hlw)
x_shift[k]=x_shift[k]+np.cos(r/R*np.pi/2.)*h
k+=1
dat_vert=np.vstack((dat_vert[out_id,:],
np.hstack((x_shift.reshape(len(in_id),1),
dat_vert[in_id,1].reshape(len(in_id),1),
dat_vert[in_id,2].reshape(len(in_id),1)))))
return dat_vert | identifier_body |
F3D_syn.py | #!/usr/bin/env python
import numpy as np
import os,sys
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
import argparse
ap=argparse.ArgumentParser()
ap.add_argument('-vis') # 1 plot cropped point cloud
ap.add_argument('-refine') # 1 refine mesh
ap.add_argument('-clean') # 1 remove tmp files
if ap.parse_args().vis==None:
vis=0
else:
vis=int(ap.parse_args().vis)
if ap.parse_args().refine==None:
refine=0
else:
refine=int(ap.parse_args().refine)
if ap.parse_args().clean==None:
clean=0
else:
clean=int(ap.parse_args().clean)
# Synthetic fault pixels
z=np.linspace(.2, -.8, num=100)
y=np.linspace(-.625,.625, num=120)
grid=np.meshgrid(y,z)
x=np.zeros((len(z)*len(y),1),dtype=np.float)
dat_vert=np.hstack((x,grid[0].reshape(x.shape),grid[1].reshape(x.shape)))
# weak
wl=np.linspace(.12,.18,num=8); amp=.03125*np.sqrt(wl)
e=1.025; r=-.2
dip=70.; zcnt=-.35
omg=[ 0.82976173, 0.89624834, 0.03829284, -0.50016345, -1.06606012, 1.40505898, -1.24256034, 1.28623393]
#omg=(np.random.rand(wl.shape[0])-.5)*np.pi
L=dat_vert[1,:].max()-dat_vert[1,:].min()
zmax=z.max(); zmin=z.min()
for i in range(len(wl)):
phs=dat_vert[:,1]/wl[i]*np.pi+omg[i]
dat_vert[:,0]=dat_vert[:,0]+amp[i]*np.cos(phs)*(e*zmax-dat_vert[:,2])/(e*zmax-zmin)*np.exp(r*abs(phs)/np.pi)
dat_vert[:,0]=dat_vert[:,0]+(zcnt-dat_vert[:,2])*np.tan((90.-dip)/180.*np.pi)
# ridge patch
def flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup):
b1=-slope1*trunc1-.7
b2=-slope2*trunc2-.7
in_id=np.where(np.logical_and(dat_vert[:,2]-slope1*dat_vert[:,1]<b1, dat_vert[:,2]-slope2*dat_vert[:,1]<b2))[0]
out_id=np.setdiff1d(np.array(range(len(dat_vert)),dtype=np.int32),in_id)
x_shift=dat_vert[in_id,0]
# ridge patch
k=0
zup=dat_vert[:,2].max()
zlw=dat_vert[:,2].min()
for i in in_id:
r=abs(dat_vert[i,1]-.5*(trunc1+trunc2))
R=.5*((dat_vert[i,2]-b2)/slope2-(dat_vert[i,2]-b1)/slope1)
h=hlw+(dat_vert[i,2]-zlw)/(zup-zlw)*(hup-hlw)
x_shift[k]=x_shift[k]+np.cos(r/R*np.pi/2.)*h
k+=1
dat_vert=np.vstack((dat_vert[out_id,:],
np.hstack((x_shift.reshape(len(in_id),1),
dat_vert[in_id,1].reshape(len(in_id),1),
dat_vert[in_id,2].reshape(len(in_id),1)))))
return dat_vert
slope1=10.;slope2=-10.
trunc1=.1;trunc2=.6
hup=0.;hlw=.08
#dat_vert=flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup)
print omg
fout='F3D_syn.xyz'
f=open(fout,'w+')
np.savetxt(f,dat_vert,delimiter=' ', fmt='%.6f '*3)
f.close()
from subprocess import call
fin=fout
fout=fout.rsplit('.')[0]+'.stl'
mxl='xyz2stl.mlx'
call(['meshlabserver', '-i',fin,'-o',fout,'-s',mxl])
if clean==1: os.remove(fin)
# Mesh
fin=fout
if refine==1:
fout=fout.rsplit('.')[0]+'_dns.exo'
else:
fout=fout.rsplit('.')[0]+'.exo'
jou='F3D_tet.jou'
txt_jou=open(jou,'r')
txt_jou_tmp=open('tmp.jou','w+')
hf=0.0025 # fault grid length (0.0025 for ~100 m tet model, 0.003 for ~40 m)
hm=0.0075 # matrix grid length (0.0075 for ~100 m tet model, 0.010 for ~40 m)
for line in txt_jou:
line=line.strip('\r\n')
if 'import' in line.lower():
line='import stl "'+fin+'"'
if 'export' in line.lower():
|
if 'surface 46 94 95 97 size' in line.lower():
line='surface 46 94 95 97 size %0.6f' %(2*hf)
if 'volume all size' in line.lower():
line='volume all size %0.6f' %(2*hm)
txt_jou_tmp.write(line+'\n')
if 'mesh volume all' in line.lower() and refine==1:
txt_jou_tmp.write('refine volume all\n')
txt_jou.close();txt_jou_tmp.close()
call(['trelis','-nojournal','-nographics','tmp.jou'])
if clean==1: os.remove('tmp.jou')
# Preprocessing msh=>inp
dt_dyn=2E-5 #1E-5 for dns 100 m tet model, 8E-5 for 40 m tet, 8E-4 for ~1 m tet
import F3D_msh2inp
_=F3D_msh2inp.msh2inp(fout,dt_dyn)
# Fault plot
if vis==1:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(dat_vert[:,0], dat_vert[:,1], dat_vert[:,2], c='b', marker='.')
# Create cubic bounding box to simulate equal aspect ratio
max_range = np.array([np.max(dat_vert[:,0])-np.min(dat_vert[:,0]),np.max(dat_vert[:,1])\
-np.min(dat_vert[:,1]), np.max(dat_vert[:,2])-np.min(dat_vert[:,2])]).max()
Xb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][0].flatten()
Yb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][1].flatten()
Zb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][2].flatten()
for xb, yb, zb in zip(Xb, Yb, Zb):
ax.plot([xb], [yb], [zb], 'w',)
plt.title('fault [km]')
plt.grid()
plt.show()
| line='export mesh "'+fout+'" dimension 3 overwrite' | conditional_block |
F3D_syn.py | #!/usr/bin/env python
import numpy as np
import os,sys
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
import argparse
ap=argparse.ArgumentParser()
ap.add_argument('-vis') # 1 plot cropped point cloud
ap.add_argument('-refine') # 1 refine mesh
ap.add_argument('-clean') # 1 remove tmp files
if ap.parse_args().vis==None:
vis=0
else:
vis=int(ap.parse_args().vis)
if ap.parse_args().refine==None:
refine=0
else:
refine=int(ap.parse_args().refine)
if ap.parse_args().clean==None:
clean=0
else:
clean=int(ap.parse_args().clean)
# Synthetic fault pixels
z=np.linspace(.2, -.8, num=100)
y=np.linspace(-.625,.625, num=120)
grid=np.meshgrid(y,z)
x=np.zeros((len(z)*len(y),1),dtype=np.float)
dat_vert=np.hstack((x,grid[0].reshape(x.shape),grid[1].reshape(x.shape)))
# weak
wl=np.linspace(.12,.18,num=8); amp=.03125*np.sqrt(wl)
e=1.025; r=-.2
dip=70.; zcnt=-.35
omg=[ 0.82976173, 0.89624834, 0.03829284, -0.50016345, -1.06606012, 1.40505898, -1.24256034, 1.28623393]
#omg=(np.random.rand(wl.shape[0])-.5)*np.pi
L=dat_vert[1,:].max()-dat_vert[1,:].min()
zmax=z.max(); zmin=z.min()
for i in range(len(wl)):
phs=dat_vert[:,1]/wl[i]*np.pi+omg[i]
dat_vert[:,0]=dat_vert[:,0]+amp[i]*np.cos(phs)*(e*zmax-dat_vert[:,2])/(e*zmax-zmin)*np.exp(r*abs(phs)/np.pi)
dat_vert[:,0]=dat_vert[:,0]+(zcnt-dat_vert[:,2])*np.tan((90.-dip)/180.*np.pi)
# ridge patch
def | (dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup):
b1=-slope1*trunc1-.7
b2=-slope2*trunc2-.7
in_id=np.where(np.logical_and(dat_vert[:,2]-slope1*dat_vert[:,1]<b1, dat_vert[:,2]-slope2*dat_vert[:,1]<b2))[0]
out_id=np.setdiff1d(np.array(range(len(dat_vert)),dtype=np.int32),in_id)
x_shift=dat_vert[in_id,0]
# ridge patch
k=0
zup=dat_vert[:,2].max()
zlw=dat_vert[:,2].min()
for i in in_id:
r=abs(dat_vert[i,1]-.5*(trunc1+trunc2))
R=.5*((dat_vert[i,2]-b2)/slope2-(dat_vert[i,2]-b1)/slope1)
h=hlw+(dat_vert[i,2]-zlw)/(zup-zlw)*(hup-hlw)
x_shift[k]=x_shift[k]+np.cos(r/R*np.pi/2.)*h
k+=1
dat_vert=np.vstack((dat_vert[out_id,:],
np.hstack((x_shift.reshape(len(in_id),1),
dat_vert[in_id,1].reshape(len(in_id),1),
dat_vert[in_id,2].reshape(len(in_id),1)))))
return dat_vert
slope1=10.;slope2=-10.
trunc1=.1;trunc2=.6
hup=0.;hlw=.08
#dat_vert=flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup)
print omg
fout='F3D_syn.xyz'
f=open(fout,'w+')
np.savetxt(f,dat_vert,delimiter=' ', fmt='%.6f '*3)
f.close()
from subprocess import call
fin=fout
fout=fout.rsplit('.')[0]+'.stl'
mxl='xyz2stl.mlx'
call(['meshlabserver', '-i',fin,'-o',fout,'-s',mxl])
if clean==1: os.remove(fin)
# Mesh
fin=fout
if refine==1:
fout=fout.rsplit('.')[0]+'_dns.exo'
else:
fout=fout.rsplit('.')[0]+'.exo'
jou='F3D_tet.jou'
txt_jou=open(jou,'r')
txt_jou_tmp=open('tmp.jou','w+')
hf=0.0025 # fault grid length (0.0025 for ~100 m tet model, 0.003 for ~40 m)
hm=0.0075 # matrix grid length (0.0075 for ~100 m tet model, 0.010 for ~40 m)
for line in txt_jou:
line=line.strip('\r\n')
if 'import' in line.lower():
line='import stl "'+fin+'"'
if 'export' in line.lower():
line='export mesh "'+fout+'" dimension 3 overwrite'
if 'surface 46 94 95 97 size' in line.lower():
line='surface 46 94 95 97 size %0.6f' %(2*hf)
if 'volume all size' in line.lower():
line='volume all size %0.6f' %(2*hm)
txt_jou_tmp.write(line+'\n')
if 'mesh volume all' in line.lower() and refine==1:
txt_jou_tmp.write('refine volume all\n')
txt_jou.close();txt_jou_tmp.close()
call(['trelis','-nojournal','-nographics','tmp.jou'])
if clean==1: os.remove('tmp.jou')
# Preprocessing msh=>inp
dt_dyn=2E-5 #1E-5 for dns 100 m tet model, 8E-5 for 40 m tet, 8E-4 for ~1 m tet
import F3D_msh2inp
_=F3D_msh2inp.msh2inp(fout,dt_dyn)
# Fault plot
if vis==1:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(dat_vert[:,0], dat_vert[:,1], dat_vert[:,2], c='b', marker='.')
# Create cubic bounding box to simulate equal aspect ratio
max_range = np.array([np.max(dat_vert[:,0])-np.min(dat_vert[:,0]),np.max(dat_vert[:,1])\
-np.min(dat_vert[:,1]), np.max(dat_vert[:,2])-np.min(dat_vert[:,2])]).max()
Xb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][0].flatten()
Yb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][1].flatten()
Zb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][2].flatten()
for xb, yb, zb in zip(Xb, Yb, Zb):
ax.plot([xb], [yb], [zb], 'w',)
plt.title('fault [km]')
plt.grid()
plt.show()
| flt_patch | identifier_name |
render.ts | import jTool from '@jTool';
import {getRowData, getTableData} from '@common/cache';
import { getAllTh, getDiv, getEmpty, getTbody, getThead, getVisibleTh, setAreVisible, updateVisibleLast } from '@common/base';
import { DISABLE_CUSTOMIZE, EMPTY_DATA_CLASS_NAME, EMPTY_TPL_KEY, ODD, PX, ROW_CLASS_NAME, TH_NAME, TR_CACHE_KEY, TR_CHILDREN_STATE, TR_PARENT_KEY, TR_ROW_KEY, ROW_INDEX_KEY } from '@common/constants';
import {each, isElement, isNumber, isObject, isString, isUndefined, isValidArray} from '@jTool/utils';
import { compileEmptyTemplate, compileTd, sendCompile } from '@common/framework';
import { outError } from '@common/utils';
import moveRow from '@module/moveRow';
import checkbox from '@module/checkbox';
import fullColumn, { getFullColumnTr, getFullColumnInterval } from '@module/fullColumn';
import tree from '@module/tree';
import { treeElementKey } from '@module/tree/tool';
import { installSummary } from '@module/summary';
import { mergeRow } from '@module/merge';
import fixed from '@module/fixed';
import remind from '@module/remind';
import sort from '@module/sort';
import filter from '@module/filter';
import adjust from '@module/adjust';
import template from './template';
import { SettingObj, Column, TrObject, Row } from 'typings/types';
/**
* 重绘thead
* @param settings
*/
export const renderThead = (settings: SettingObj): void => {
const { _, columnMap, sortUpText, sortDownText, supportAdjust } = settings;
const $thead = getThead(_);
$thead.html(template.getTheadTpl({ settings }));
// 单个table下的TH
const $thList = getAllTh(_);
// 由于部分操作需要在th已经存在于dom的情况下执行, 所以存在以下循环
// 单个TH下的上层DIV
each($thList, (item: HTMLTableElement) => {
const onlyTH = jTool(item);
const onlyThWarp = jTool('.th-wrap', onlyTH);
const thName = onlyTH.attr(TH_NAME);
const column = columnMap[thName];
// 是否为GM自动添加的列
const isAutoCol = column.isAutoCreate;
// 嵌入表头提醒事件源
if (!isAutoCol && column.remind) {
onlyThWarp.append(jTool(remind.createHtml({ remind: column.remind })));
}
// 嵌入排序事件源
if (!isAutoCol && isString(column.sorting)) {
const sortingDom = jTool(sort.createHtml());
// 依据 column.sorting 进行初始显示
switch (column.sorting) {
case sortUpText:
sortingDom.addClass('sorting-up');
break;
case sortDownText:
sortingDom.addClass('sorting-down');
break;
}
onlyThWarp.append(sortingDom);
}
// 嵌入表头的筛选事件源
// 插件自动生成的序号列与选择列不做事件绑定
if (!isAutoCol && column.filter && isObject(column.filter)) {
const filterDom = jTool(filter.createHtml({settings, columnFilter: column.filter}));
onlyThWarp.append(filterDom);
}
// 嵌入宽度调整事件源,以下情况除外
// 1.插件自动生成的选择列和序号列不做事件绑定
// 2.禁止使用个性配置功能的列
if (supportAdjust && !isAutoCol && !column[DISABLE_CUSTOMIZE]) {
onlyThWarp.append(jTool(adjust.html));
}
});
};
/**
 * Render the "no data" placeholder row into tbody.
 *
 * Inserts a single tr spanning all visible columns, sized to fill the table
 * container, whose td content is the compiled empty template.
 * @param settings - full GridManager settings object
 * @param isInit - true when called during the initial render pass
 */
export const renderEmptyTbody = (settings: SettingObj, isInit?: boolean): void => {
    const { _, emptyTemplate } = settings;

    // On the very first render, skip the placeholder when data already exists
    // (setQuery may have run while the container was still invisible).
    if (isInit && getTableData(_, true).length !== 0) {
        return;
    }

    const $div = getDiv(_);
    $div.addClass(EMPTY_DATA_CLASS_NAME);

    // Build the placeholder row: full container height, spanning every visible th
    const trHeight = $div.height() - 1 + PX;
    const colspan = getVisibleTh(_).length;
    getTbody(_).html(`<tr ${EMPTY_TPL_KEY}="${_}" style="height: ${trHeight}"><td colspan="${colspan}"></td></tr>`);

    const td = getEmpty(_).get(0).querySelector('td');
    td.innerHTML = compileEmptyTemplate(settings, td, emptyTemplate);

    // Ask the host framework (if any) to compile the empty template
    sendCompile(settings);
};
/**
 * Reassemble the table body.
 *
 * Builds an in-memory list of tr descriptors from bodyList, then performs a
 * differential update against the existing tbody DOM: rows whose cache key
 * still matches are updated in place, stale rows are removed, and new rows
 * are appended (or prepended when their row index precedes the first cached
 * row — relevant for virtual scrolling).
 *
 * NOTE(perf): the biggest cost here is the very first access to a large
 * tbody or its parent container.
 * @param settings - full GridManager settings object
 * @param bodyList - row data to render (tree children nested under treeKey)
 * @param isVirtualScroll - whether virtual scrolling is active; when true the
 *        trailing fixed/filled/last-column bookkeeping is skipped
 * @param firstTrCacheKey - cache key of the first row expected in the DOM
 * @param lastTrCacheKey - cache key of the last row expected in the DOM
 */
export const renderTbody = async (settings: SettingObj, bodyList: Array<Row>, isVirtualScroll: boolean, firstTrCacheKey: string, lastTrCacheKey: string): Promise<any> => {
    const {
        _,
        columnMap,
        supportTreeData,
        supportCheckbox,
        supportMoveRow,
        treeConfig,
        __isNested,
        __isFullColumn
    } = settings;
    const { treeKey, openState } = treeConfig;
    // tbody dom
    const $tbody = getTbody(_);
    const tbody = $tbody.get(0);
    // Remove the "no data" placeholder row if present
    const $emptyTr = $tbody.find(`[${EMPTY_TPL_KEY}="${_}"]`);
    if ($emptyTr.length) {
        $emptyTr.remove();
    }
    // Accumulates one descriptor per tr to be rendered
    let trObjectList: Array<TrObject> = [];
    // Order columnMap by each column's index (pk columns are excluded)
    const topList: Array<Column> = [];
    const columnList: Array<Column> = [];
    each(columnMap, (key: string, col: Column) => {
        if (!col.pk) {
            topList[col.index] = col;
        }
    });
    // Flatten nested header columns: only leaf columns produce td cells
    const pushList = (list: Array<Column>) => {
        each(list, (col: Column) => {
            if (!isValidArray(col.children)) {
                columnList.push(col);
                return;
            }
            pushList(col.children);
        });
    };
    pushList(topList);
    // Fill a tr descriptor's tdList with one td per leaf column
    const installNormal = (trObject: TrObject, row: Row, rowIndex: number, isTop: boolean): void => {
        // td list matching the current position info
        const tdList = trObject.tdList;
        each(columnList, (col: Column) => {
            const tdTemplate = col.template;
            // Auto-created columns (checkbox/order) render via their own template
            if (col.isAutoCreate) {
                tdList.push(tdTemplate(row[col.key], row, rowIndex, isTop));
                return;
            }
            let { text, compileAttr } = compileTd(settings, tdTemplate, row, rowIndex, col.key);
            const alignAttr = col.align ? `align=${col.align}` : '';
            const moveRowAttr = supportMoveRow ? moveRow.addSign(col) : '';
            const useRowCheckAttr = supportCheckbox ? checkbox.addSign(col) : '';
            const fixedAttr = col.fixed ? `fixed=${col.fixed}` : '';
            text = isElement(text) ? text.outerHTML : text;
            tdList.push(`<td ${compileAttr} ${alignAttr} ${moveRowAttr} ${useRowCheckAttr} ${fixedAttr}>${text}</td>`);
        });
    };
    try {
        // Recursively build tr descriptors; pIndex is the parent row's cache
        // key (undefined marks a top-level row)
        const installTr = (list: Array<Row>, level: number, pIndex?: string): void => {
            const isTop = isUndefined(pIndex);
            each(list, (row: Row, index: number) => {
                const className = [];
                const attribute = [];
                const tdList: Array<string> = [];
                const cacheKey = row[TR_CACHE_KEY];
                // Per-row class name supplied via row data
                if (row[ROW_CLASS_NAME]) {
                    className.push(row[ROW_CLASS_NAME]);
                }
                // Non top-level rows: record parent key and initial open state
                if (!isTop) {
                    attribute.push([TR_PARENT_KEY, pIndex]);
                    attribute.push([TR_CHILDREN_STATE, openState]);
                }
                // Top level with tree data enabled
                if (isTop && supportTreeData) {
                    // CSS :nth-child(odd) can't be used directly: collapsed
                    // child rows would break the striping
                    index % 2 === 0 && attribute.push([ODD, '']);
                }
                attribute.push([TR_CACHE_KEY, cacheKey])
                const trObject: TrObject = {
                    className,
                    attribute,
                    row,
                    querySelector: `[${TR_CACHE_KEY}="${cacheKey}"]`,
                    tdList
                };
                // Top level: full-column banner row above the data row
                if (isTop && __isFullColumn) {
                    fullColumn.addTop(settings, row, index, trObjectList);
                }
                // Regular data tr
                installNormal(trObject, row, index, isTop);
                trObjectList.push(trObject);
                // Top level: full-column banner row below the data row
                if (isTop && __isFullColumn) {
                    fullColumn.addBottom(settings, row, index, trObjectList);
                }
                // Tree (hierarchical) rows
                if (supportTreeData) {
                    const children = row[treeKey];
                    const hasChildren = children && children.length;
                    // When updating, preserve the previously toggled open state
                    let state;
                    const $treeElement = $tbody.find(`${trObject.querySelector} [${treeElementKey}]`);
                    if ($treeElement.length) {
                        state = $treeElement.attr(treeElementKey) === 'true';
                    }
                    // Register this row in the tree map
                    tree.add(_, cacheKey, level, hasChildren, state);
                    // Recurse into child rows
                    if (hasChildren) {
                        installTr(children, level + 1, cacheKey);
                    }
                }
            });
        };
        installTr(bodyList, 0);
        // Append the summary row; validation happens inside the function
        installSummary(settings, columnList, trObjectList);
        const prependFragment = document.createDocumentFragment();
        const df = document.createDocumentFragment();
        // Move every existing tr out of the live tbody into the work fragment
        const $tr = $tbody.find('tr');
        each($tr, (item: HTMLTableRowElement) => {
            df.appendChild(item);
        });
        tbody.innerHTML = '';
        // Drop existing tr nodes that fall outside the expected data window
        if (df.children.length) {
            let firstLineIndex: number;
            let lastLineIndex: number;
            // First row: a full-column "top" banner may precede the data tr
            let firstTr = getFullColumnTr(df, 'top', firstTrCacheKey);
            if (!firstTr) {
                firstTr = df.querySelector(`[${TR_CACHE_KEY}="${firstTrCacheKey}"]`);
            }
            if (firstTr) {
                firstLineIndex = [].indexOf.call(df.children, firstTr);
            }
            // Last row: a full-column interval (divider) row may follow it
            let lastTr = getFullColumnInterval(df, lastTrCacheKey);
            if (!lastTr) {
                lastTr = df.querySelector(`[${TR_CACHE_KEY}="${lastTrCacheKey}"]`);
            }
            if (lastTr) {
                lastLineIndex = [].indexOf.call(df.children, lastTr);
            }
            const list: Array<HTMLTableRowElement> = [];
            each(df.children, (item: HTMLTableRowElement, index: number) => {
                // Neither boundary row exists in the DOM: discard every tr
                if (!isNumber(firstLineIndex) && !isNumber(lastLineIndex)) {
                    list.push(item);
                    return;
                }
                // First row exists: discard trs before it
                if (isNumber(firstLineIndex) && index < firstLineIndex) {
                    list.push(item);
                }
                // Last row exists: discard trs after it
                if (isNumber(lastLineIndex) && index > lastLineIndex) {
                    list.push(item);
                }
            });
            each(list, (item: HTMLTableRowElement) => item.remove());
        }
        trObjectList.forEach(item => {
            const { className, attribute, tdList, row, querySelector } = item;
            const tdStr = tdList.join('');
            // Differential update:
            // look up the existing tr via its cache-key attribute
            let tr = df.querySelector(querySelector);
            if (tr) {
                tr.innerHTML = tdStr;
            } else {
                tr = document.createElement('tr');
                if (className.length) {
                    tr.className = className.join(' ');
                }
                attribute.forEach(attr => {
                    tr.setAttribute(attr[0], attr[1]);
                });
                tr.innerHTML = tdStr;
                // Rows whose index precedes the first cached row are collected
                // into prependFragment so DOM order matches data order
                const firstCacheTr = df.querySelector(`[${TR_CACHE_KEY}]`) as HTMLTableRowElement;
                if (firstCacheTr && !isUndefined(row)) {
                    const firstNum = getRowData(_, firstCacheTr, true)[ROW_INDEX_KEY];
                    const nowNum = row[ROW_INDEX_KEY];
                    if (nowNum < firstNum) {
                        prependFragment.appendChild(tr);
                    } else {
                        df.appendChild(tr);
                    }
                } else {
                    df.appendChild(tr);
                }
            }
            // Attach the row data object directly onto the DOM node
            tr[TR_ROW_KEY] = row;
        });
        df.insertBefore(prependFragment, df.firstChild);
        tbody.appendChild(df);
    } catch (e) {
        outError('render tbody error');
        console.error(e);
    }
    // Initialize column visibility when headers are not multi-level nested
    // (nested headers do not support show/hide operations)
    if (!__isNested) {
        each(columnMap, (key: string, col: Column) => {
            setAreVisible(_, key, col.isShow);
        });
    }
    // Let the host framework compile the rendered templates
    await sendCompile(settings);
    // Insert tree toggle DOM
    supportTreeData && tree.insertDOM(_, treeConfig);
    // Merge cells
    mergeRow(_, columnMap);
    // Virtual scrolling skips the remaining bookkeeping
    if (!isVirtualScroll) {
        fixed.update(_);
        // Flag whether tbody fills its container
        if ($tbody.height() >= getDiv(_).height()) {
            $tbody.attr('filled', '');
        } else {
            $tbody.removeAttr('filled');
        }
        // Mark the last visible column's th/td; skipped for nested headers
        if (!settings.__isNested) {
            updateVisibleLast(_);
        }
    }
};
| firstCacheTr = df.querySelector(`[${TR_CA | conditional_block |
render.ts | import jTool from '@jTool';
import {getRowData, getTableData} from '@common/cache';
import { getAllTh, getDiv, getEmpty, getTbody, getThead, getVisibleTh, setAreVisible, updateVisibleLast } from '@common/base';
import { DISABLE_CUSTOMIZE, EMPTY_DATA_CLASS_NAME, EMPTY_TPL_KEY, ODD, PX, ROW_CLASS_NAME, TH_NAME, TR_CACHE_KEY, TR_CHILDREN_STATE, TR_PARENT_KEY, TR_ROW_KEY, ROW_INDEX_KEY } from '@common/constants';
import {each, isElement, isNumber, isObject, isString, isUndefined, isValidArray} from '@jTool/utils';
import { compileEmptyTemplate, compileTd, sendCompile } from '@common/framework';
import { outError } from '@common/utils';
import moveRow from '@module/moveRow';
import checkbox from '@module/checkbox';
import fullColumn, { getFullColumnTr, getFullColumnInterval } from '@module/fullColumn';
import tree from '@module/tree';
import { treeElementKey } from '@module/tree/tool';
import { installSummary } from '@module/summary';
import { mergeRow } from '@module/merge';
import fixed from '@module/fixed';
import remind from '@module/remind';
import sort from '@module/sort';
import filter from '@module/filter';
import adjust from '@module/adjust';
import template from './template';
import { SettingObj, Column, TrObject, Row } from 'typings/types';
/**
 * Re-render the table header: renders the thead template, then attaches the
 * remind / sorting / filter / width-adjust widgets to each th (these can only
 * be wired up after the th elements exist in the DOM).
 * @param settings - full GridManager settings object
 */
export const renderThead = (settings: SettingObj): void => {
    const { _, columnMap, sortUpText, sortDownText, supportAdjust } = settings;
    const $thead = getThead(_);
    $thead.html(template.getTheadTpl({ settings }));
    // All th elements within this table
    const $thList = getAllTh(_);
    // Some operations require the th to already exist in the DOM,
    // hence the loop below.
    // Wrapper DIV inside each th
    each($thList, (item: HTMLTableElement) => {
        const onlyTH = jTool(item)
        const onlyThWarp = jTool('.th-wrap', onlyTH);
        const thName = onlyTH.attr(TH_NAME);
        const column = columnMap[thName];
        // Whether this column was auto-created by GM (checkbox/order)
        const isAutoCol = column.isAutoCreate;
        // Attach the header remind (tooltip) trigger
        if (!isAutoCol && column.remind) {
            onlyThWarp.append(jTool(remind.createHtml({ remind: column.remind })));
        }
        // Attach the sorting trigger
        if (!isAutoCol && isString(column.sorting)) {
            const sortingDom = jTool(sort.createHtml());
            // Initial direction class mirrors column.sorting
            switch (column.sorting) {
                case sortUpText:
                    sortingDom.addClass('sorting-up');
                    break;
                case sortDownText:
                    sortingDom.addClass('sorting-down');
                    break;
            }
            onlyThWarp.append(sortingDom);
        }
        // Attach the header filter trigger.
        // Auto-generated order and checkbox columns are not bound.
        if (!isAutoCol && column.filter && isObject(column.filter)) {
            const filterDom = jTool(filter.createHtml({settings, columnFilter: column.filter}));
            onlyThWarp.append(filterDom);
        }
        // Attach the width-adjust handle, except when:
        // 1. the column is an auto-generated checkbox/order column
        // 2. the column disables per-user customization
        if (supportAdjust && !isAutoCol && !column[DISABLE_CUSTOMIZE]) {
            onlyThWarp.append(jTool(adjust.html));
        }
    });
};
/**
 * Render the "no data" placeholder row: a single tr, sized to fill the table
 * container, spanning all visible columns, holding the compiled empty template.
 * @param settings - full GridManager settings object
 * @param isInit - true when called during the initial render pass
 */
export const renderEmptyTbody = (settings: SettingObj, isInit?: boolean): void => {
    const { _, emptyTemplate } = settings;
    // On the very first render, skip the placeholder when data already exists
    // (covers the case where setQuery ran while the container was invisible)
    if (isInit && getTableData(_, true).length !== 0) {
        return;
    }
    const $tableDiv = getDiv(_);
    $tableDiv.addClass(EMPTY_DATA_CLASS_NAME);
    getTbody(_).html(`<tr ${EMPTY_TPL_KEY}="${_}" style="height: ${$tableDiv.height() - 1 + PX}"><td colspan="${getVisibleTh(_).length}"></td></tr>`);
    const emptyTd = getEmpty(_).get(0).querySelector('td');
    emptyTd.innerHTML = compileEmptyTemplate(settings, emptyTd, emptyTemplate);
    // Ask the host framework to compile the empty template
    sendCompile(settings);
};
/**
* 重新组装table body: 这个方法最大的性能问题在于tbody过大时,首次获取tbody或其父容器时过慢
* @param settings
* @param bodyList
* @param isVirtualScroll: 当前是否为虚拟滚动
* @param firstTrCacheKey
* @param lastTrCacheKey
*/
export const renderTbody = async (settings: SettingObj, bodyList: Array<Row>, isVirtualScroll: boolean, firstTrCacheKey: string, lastTrCacheKey: string): Promise<any> => {
const {
_,
columnMap,
supportTreeData,
supportCheckbox,
supportMoveRow,
treeConfig,
__isNested,
__isFullColumn
} = settings;
const { treeKey, openState } = treeConfig;
// tbody dom
const $tbody = getTbody(_);
const tbody = $tbody.get(0);
// 清除数据为空时的dom
const $emptyTr = $tbody.find(`[${EMPTY_TPL_KEY}="${_}"]`);
if ($emptyTr.length) {
$emptyTr.remove();
}
// 存储tr对像列表
let trObjectList: Array<TrObject> = [];
// 通过index对columnMap进行排序
const topList: Array<Column> = [];
const columnList: Array<Column> = [];
each(columnMap, (key: string, col: Column) => {
if (!col.pk) {
topList[col.index] = col;
}
});
const pushList = (list: Array<Column>) => {
each(list, (col: Column) => {
if (!isValidArray(col.children)) {
columnList.push(col);
return;
}
pushList(col.children);
});
};
pushList(topList);
// 插入常规的TR
const installNormal = (trObject: TrObject, row: Row, rowIndex: number, isTop: boolean): void => {
// 与当前位置信息匹配的td列表
const tdList = trObject.tdList;
each(columnList, (col: Column) => {
const tdTemplate = col.template;
if (col.isAutoCreate) {
tdList.push(tdTemplate(row[col.key], row, rowIndex, isTop));
return;
}
let { text, compileAttr } = compileTd(settings, tdTemplate, row, rowIndex, col.key);
const alignAttr = col.align ? `align=${col.align}` : '';
const moveRowAttr = supportMoveRow ? moveRow.addSign(col) : '';
const useRowCheckAttr = supportCheckbox ? checkbox.addSign(col) : '';
const fixedAttr = col.fixed ? `fixed=${col.fixed}` : '';
text = isElement(text) ? text.outerHTML : text;
tdList.push(`<td ${compileAttr} ${alignAttr} ${moveRowAttr} ${useRowCheckAttr} ${fixedAttr}>${text}</td>`);
});
};
try {
const installTr = (list: Array<Row>, level: number, pIndex?: string): void => {
const isTop = isUndefined(pIndex);
each(list, (row: Row, index: number) => {
const className = [];
const attribute = [];
const tdList: Array<string> = [];
const cacheKey = row[TR_CACHE_KEY];
// 增加行 class name
if (row[ROW_CLASS_NAME]) {
className.push(row[ROW_CLASS_NAME]);
}
// 非顶层
if (!isTop) {
attribute.push([TR_PARENT_KEY, pIndex]);
attribute.push([TR_CHILDREN_STATE, openState]);
}
// 顶层 且当前为树形结构
if (isTop && supportTreeData) {
// 不直接使用css odd是由于存在层级数据时无法排除折叠元素
index % 2 === 0 && attribute.push([ODD, '']);
}
attribute.push([TR_CACHE_KEY, cacheKey]);
const trObject: TrObject = {
className,
attribute,
row,
querySelector: `[${TR_CACHE_KEY}="${cacheKey}"]`,
tdList
};
// 顶层结构: 通栏-top
if (isTop && __isFullColumn) {
fullColumn.addTop(settings, row, index, trObjectList);
}
// 插入正常的TR
installNormal(trObject, row, index, isTop);
trObjectList.push(trObject);
// 顶层结构: 通栏-bottom
if (isTop && __isFullColumn) {
fullColumn.addBottom(settings, row, index, trObjectList);
}
// 处理层级结构
if (supportTreeData) {
const children = row[treeKey];
const hasChildren = children && children.length;
// 当前为更新时,保留原状态
let state;
const $treeElement = $tbody.find(`${trObject.querySelector} [${treeElementKey}]`);
if ($treeElement.length) {
state = $treeElement.attr(treeElementKey) === 'true';
}
// 添加tree map
tree.add(_, cacheKey, level, hasChildren, state);
// 递归处理层极结构
if (hasChildren) {
installTr(children, level + 1, cacheKey);
}
}
});
};
installTr(bodyList, 0);
// 插入汇总行: 验证在函数内
installSummary(settings, columnList, trObjectList);
const prependFragment = document.createDocumentFragment();
const df = document.createDocumentFragment();
const $tr = $tbody.find('tr');
each($tr, (item: HTMLTableRowElement) => {
df.appendChild(item);
});
tbody.innerHTML = '';
// 清除与数据不匹配的tr
if (df.children.length) {
let firstLineIndex: number;
let lastLineIndex: number;
// 处理开始行: 需要验证上通栏行
let firstTr = getFullColumnTr(df, 'top', firstTrCacheKey);
if (!firstTr) {
firstTr = df.querySelector(`[${TR_CACHE_KEY}="${firstTrCacheKey}"]`);
}
if (firstTr) {
firstLineIndex = [].indexOf.call(df.children, firstTr);
}
// 处理结束行: 需要验证分割行
let lastTr = getFullColumnInterval(df, lastTrCacheKey);
if (!lastTr) {
lastTr = df.querySelector(`[${TR_CACHE_KEY}="${lastTrCacheKey}"]`);
}
if (lastTr) {
lastLineIndex = [].indexOf.call(df.children, lastTr);
}
const list: Array<HTMLTableRowElement> = []; | each(df.children, (item: HTMLTableRowElement, index: number) => {
// DOM中不存在开始行与结束行的tr: 清空所有tr
if (!isNumber(firstLineIndex) && !isNumber(lastLineIndex)) {
list.push(item);
return;
}
// DOM中存在开始行的tr: 清空小于开始的tr
if (isNumber(firstLineIndex) && index < firstLineIndex) {
list.push(item);
}
// DOM中存在结束行的tr: 清空大于结束行的tr
if (isNumber(lastLineIndex) && index > lastLineIndex) {
list.push(item);
}
});
each(list, (item: HTMLTableRowElement) => item.remove());
}
trObjectList.forEach(item => {
const { className, attribute, tdList, row, querySelector } = item;
const tdStr = tdList.join('');
// 差异化更新
// 通过dom节点上的属性反查dom
let tr = df.querySelector(querySelector);
if (tr) {
tr.innerHTML = tdStr;
} else {
tr = document.createElement('tr');
if (className.length) {
tr.className = className.join(' ');
}
attribute.forEach(attr => {
tr.setAttribute(attr[0], attr[1]);
});
tr.innerHTML = tdStr;
const firstCacheTr = df.querySelector(`[${TR_CACHE_KEY}]`) as HTMLTableRowElement;
if (firstCacheTr && !isUndefined(row)) {
const firstNum = getRowData(_, firstCacheTr, true)[ROW_INDEX_KEY];
const nowNum = row[ROW_INDEX_KEY];
if (nowNum < firstNum) {
prependFragment.appendChild(tr);
} else {
df.appendChild(tr);
}
} else {
df.appendChild(tr);
}
}
// 将数据挂载至DOM
tr[TR_ROW_KEY] = row;
});
df.insertBefore(prependFragment, df.firstChild);
tbody.appendChild(df);
} catch (e) {
outError('render tbody error');
console.error(e);
}
// 非多层嵌套初始化显示状态: 多层嵌套不支持显示、隐藏操作
if (!__isNested) {
each(columnMap, (key: string, col: Column) => {
setAreVisible(_, key, col.isShow);
});
}
// 解析框架
await sendCompile(settings);
// 插入tree dom
supportTreeData && tree.insertDOM(_, treeConfig);
// 合并单元格
mergeRow(_, columnMap);
// 虚拟滚动无需执行以后逻辑
if (!isVirtualScroll) {
fixed.update(_);
// 增加tbody是否填充满标识
if ($tbody.height() >= getDiv(_).height()) {
$tbody.attr('filled', '');
} else {
$tbody.removeAttr('filled');
}
// 为最后一列的th, td增加标识: 嵌套表头不处理
if (!settings.__isNested) {
updateVisibleLast(_);
}
}
}; | random_line_split |
|
FileTypes.py | # Copyright (C) 2008 LibreSoft
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors :
# Carlos Garcia Campos <[email protected]>
from pycvsanaly2.Database import (SqliteDatabase, MysqlDatabase,
TableAlreadyExists, statement)
from pycvsanaly2.extensions import (Extension, register_extension,
ExtensionRunError)
from pycvsanaly2.extensions.file_types import guess_file_type
from pycvsanaly2.utils import to_utf8, uri_to_filename
class DBFileType(object):
id_counter = 1
__insert__ = """INSERT INTO file_types (id, file_id, type)
values (?, ?, ?)"""
def __init__(self, id, type, file_id):
if id is None:
self.id = DBFileType.id_counter
DBFileType.id_counter += 1
else:
self.id = id
self.type = to_utf8(type)
self.file_id = file_id
class FileTypes(Extension):
def __init__(self):
self.db = None
def __create_table(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, SqliteDatabase):
import sqlite3.dbapi2
try:
cursor.execute("CREATE TABLE file_types (" +
"id integer primary key," +
"file_id integer," +
"type varchar" +
")")
except sqlite3.dbapi2.OperationalError:
cursor.close()
raise TableAlreadyExists
except:
raise
elif isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("CREATE TABLE file_types (" +
"id INT primary key," +
"file_id integer REFERENCES files(id)," +
"type mediumtext" +
") CHARACTER SET=utf8")
except MySQLdb.OperationalError, e:
if e.args[0] == 1050:
cursor.close()
raise TableAlreadyExists
raise
except:
raise
cnn.commit()
cursor.close()
def __create_indices(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("create index parent_id on file_links(parent_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
cursor.close()
raise
try:
cursor.execute("create index repository_id on files(repository_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
|
cursor.close()
def __get_files_for_repository(self, repo_id, cursor):
query = "SELECT ft.file_id from file_types ft, files f " + \
"WHERE f.id = ft.file_id and f.repository_id = ?"
cursor.execute(statement(query, self.db.place_holder), (repo_id,))
files = [res[0] for res in cursor.fetchall()]
return files
def run(self, repo, uri, db):
self.db = db
path = uri_to_filename(uri)
if path is not None:
repo_uri = repo.get_uri_for_path(path)
else:
repo_uri = uri
cnn = self.db.connect()
cursor = cnn.cursor()
cursor.execute(statement("SELECT id from repositories where uri = ?",
db.place_holder), (repo_uri,))
repo_id = cursor.fetchone()[0]
files = []
try:
self.__create_table(cnn)
except TableAlreadyExists:
cursor.execute(statement("SELECT max(id) from file_types",
db.place_holder))
id = cursor.fetchone()[0]
if id is not None:
DBFileType.id_counter = id + 1
files = self.__get_files_for_repository(repo_id, cursor)
except Exception, e:
raise ExtensionRunError(str(e))
self.__create_indices(cnn)
query = """select distinct f.id fid, f.file_name fname
from files f
where f.repository_id = ?
and not exists (select id from file_links where parent_id = f.id)"""
cursor.execute(statement(query, db.place_holder), (repo_id,))
write_cursor = cnn.cursor()
rs = cursor.fetchmany()
while rs:
types = []
for file_id, file_name in rs:
if file_id in files:
continue
type = guess_file_type(file_name)
types.append(DBFileType(None, type, file_id))
if types:
file_types = [(type.id, type.file_id, type.type) \
for type in types]
write_cursor.executemany(statement(DBFileType.__insert__,
self.db.place_holder),
file_types)
rs = cursor.fetchmany()
cnn.commit()
write_cursor.close()
cursor.close()
cnn.close()
def backout(self, repo, uri, db):
update_statement = """delete from file_types where
file_id in (select id from files f
where f.repository_id = ?)"""
self._do_backout(repo, uri, db, update_statement)
register_extension("FileTypes", FileTypes)
| cursor.close()
raise | conditional_block |
FileTypes.py | # Copyright (C) 2008 LibreSoft
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors :
# Carlos Garcia Campos <[email protected]>
from pycvsanaly2.Database import (SqliteDatabase, MysqlDatabase,
TableAlreadyExists, statement)
from pycvsanaly2.extensions import (Extension, register_extension,
ExtensionRunError)
from pycvsanaly2.extensions.file_types import guess_file_type
from pycvsanaly2.utils import to_utf8, uri_to_filename
class DBFileType(object):
id_counter = 1
__insert__ = """INSERT INTO file_types (id, file_id, type)
values (?, ?, ?)"""
def __init__(self, id, type, file_id):
if id is None:
self.id = DBFileType.id_counter
DBFileType.id_counter += 1
else:
self.id = id
self.type = to_utf8(type)
self.file_id = file_id
class | (Extension):
def __init__(self):
self.db = None
def __create_table(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, SqliteDatabase):
import sqlite3.dbapi2
try:
cursor.execute("CREATE TABLE file_types (" +
"id integer primary key," +
"file_id integer," +
"type varchar" +
")")
except sqlite3.dbapi2.OperationalError:
cursor.close()
raise TableAlreadyExists
except:
raise
elif isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("CREATE TABLE file_types (" +
"id INT primary key," +
"file_id integer REFERENCES files(id)," +
"type mediumtext" +
") CHARACTER SET=utf8")
except MySQLdb.OperationalError, e:
if e.args[0] == 1050:
cursor.close()
raise TableAlreadyExists
raise
except:
raise
cnn.commit()
cursor.close()
def __create_indices(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("create index parent_id on file_links(parent_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
cursor.close()
raise
try:
cursor.execute("create index repository_id on files(repository_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
cursor.close()
raise
cursor.close()
def __get_files_for_repository(self, repo_id, cursor):
query = "SELECT ft.file_id from file_types ft, files f " + \
"WHERE f.id = ft.file_id and f.repository_id = ?"
cursor.execute(statement(query, self.db.place_holder), (repo_id,))
files = [res[0] for res in cursor.fetchall()]
return files
def run(self, repo, uri, db):
self.db = db
path = uri_to_filename(uri)
if path is not None:
repo_uri = repo.get_uri_for_path(path)
else:
repo_uri = uri
cnn = self.db.connect()
cursor = cnn.cursor()
cursor.execute(statement("SELECT id from repositories where uri = ?",
db.place_holder), (repo_uri,))
repo_id = cursor.fetchone()[0]
files = []
try:
self.__create_table(cnn)
except TableAlreadyExists:
cursor.execute(statement("SELECT max(id) from file_types",
db.place_holder))
id = cursor.fetchone()[0]
if id is not None:
DBFileType.id_counter = id + 1
files = self.__get_files_for_repository(repo_id, cursor)
except Exception, e:
raise ExtensionRunError(str(e))
self.__create_indices(cnn)
query = """select distinct f.id fid, f.file_name fname
from files f
where f.repository_id = ?
and not exists (select id from file_links where parent_id = f.id)"""
cursor.execute(statement(query, db.place_holder), (repo_id,))
write_cursor = cnn.cursor()
rs = cursor.fetchmany()
while rs:
types = []
for file_id, file_name in rs:
if file_id in files:
continue
type = guess_file_type(file_name)
types.append(DBFileType(None, type, file_id))
if types:
file_types = [(type.id, type.file_id, type.type) \
for type in types]
write_cursor.executemany(statement(DBFileType.__insert__,
self.db.place_holder),
file_types)
rs = cursor.fetchmany()
cnn.commit()
write_cursor.close()
cursor.close()
cnn.close()
def backout(self, repo, uri, db):
update_statement = """delete from file_types where
file_id in (select id from files f
where f.repository_id = ?)"""
self._do_backout(repo, uri, db, update_statement)
register_extension("FileTypes", FileTypes)
| FileTypes | identifier_name |
FileTypes.py | # Copyright (C) 2008 LibreSoft
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors :
# Carlos Garcia Campos <[email protected]>
from pycvsanaly2.Database import (SqliteDatabase, MysqlDatabase,
TableAlreadyExists, statement)
from pycvsanaly2.extensions import (Extension, register_extension,
ExtensionRunError)
from pycvsanaly2.extensions.file_types import guess_file_type
from pycvsanaly2.utils import to_utf8, uri_to_filename
class DBFileType(object):
id_counter = 1
__insert__ = """INSERT INTO file_types (id, file_id, type)
values (?, ?, ?)"""
def __init__(self, id, type, file_id):
if id is None:
self.id = DBFileType.id_counter
DBFileType.id_counter += 1
else:
self.id = id
self.type = to_utf8(type)
self.file_id = file_id
class FileTypes(Extension):
|
register_extension("FileTypes", FileTypes)
| def __init__(self):
self.db = None
def __create_table(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, SqliteDatabase):
import sqlite3.dbapi2
try:
cursor.execute("CREATE TABLE file_types (" +
"id integer primary key," +
"file_id integer," +
"type varchar" +
")")
except sqlite3.dbapi2.OperationalError:
cursor.close()
raise TableAlreadyExists
except:
raise
elif isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("CREATE TABLE file_types (" +
"id INT primary key," +
"file_id integer REFERENCES files(id)," +
"type mediumtext" +
") CHARACTER SET=utf8")
except MySQLdb.OperationalError, e:
if e.args[0] == 1050:
cursor.close()
raise TableAlreadyExists
raise
except:
raise
cnn.commit()
cursor.close()
def __create_indices(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("create index parent_id on file_links(parent_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
cursor.close()
raise
try:
cursor.execute("create index repository_id on files(repository_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
cursor.close()
raise
cursor.close()
def __get_files_for_repository(self, repo_id, cursor):
query = "SELECT ft.file_id from file_types ft, files f " + \
"WHERE f.id = ft.file_id and f.repository_id = ?"
cursor.execute(statement(query, self.db.place_holder), (repo_id,))
files = [res[0] for res in cursor.fetchall()]
return files
def run(self, repo, uri, db):
self.db = db
path = uri_to_filename(uri)
if path is not None:
repo_uri = repo.get_uri_for_path(path)
else:
repo_uri = uri
cnn = self.db.connect()
cursor = cnn.cursor()
cursor.execute(statement("SELECT id from repositories where uri = ?",
db.place_holder), (repo_uri,))
repo_id = cursor.fetchone()[0]
files = []
try:
self.__create_table(cnn)
except TableAlreadyExists:
cursor.execute(statement("SELECT max(id) from file_types",
db.place_holder))
id = cursor.fetchone()[0]
if id is not None:
DBFileType.id_counter = id + 1
files = self.__get_files_for_repository(repo_id, cursor)
except Exception, e:
raise ExtensionRunError(str(e))
self.__create_indices(cnn)
query = """select distinct f.id fid, f.file_name fname
from files f
where f.repository_id = ?
and not exists (select id from file_links where parent_id = f.id)"""
cursor.execute(statement(query, db.place_holder), (repo_id,))
write_cursor = cnn.cursor()
rs = cursor.fetchmany()
while rs:
types = []
for file_id, file_name in rs:
if file_id in files:
continue
type = guess_file_type(file_name)
types.append(DBFileType(None, type, file_id))
if types:
file_types = [(type.id, type.file_id, type.type) \
for type in types]
write_cursor.executemany(statement(DBFileType.__insert__,
self.db.place_holder),
file_types)
rs = cursor.fetchmany()
cnn.commit()
write_cursor.close()
cursor.close()
cnn.close()
def backout(self, repo, uri, db):
update_statement = """delete from file_types where
file_id in (select id from files f
where f.repository_id = ?)"""
self._do_backout(repo, uri, db, update_statement) | identifier_body |
FileTypes.py | # Copyright (C) 2008 LibreSoft
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors :
# Carlos Garcia Campos <[email protected]>
from pycvsanaly2.Database import (SqliteDatabase, MysqlDatabase,
TableAlreadyExists, statement)
from pycvsanaly2.extensions import (Extension, register_extension,
ExtensionRunError)
from pycvsanaly2.extensions.file_types import guess_file_type
from pycvsanaly2.utils import to_utf8, uri_to_filename
class DBFileType(object):
id_counter = 1
__insert__ = """INSERT INTO file_types (id, file_id, type)
values (?, ?, ?)"""
def __init__(self, id, type, file_id):
if id is None:
self.id = DBFileType.id_counter
DBFileType.id_counter += 1
else:
self.id = id
self.type = to_utf8(type)
self.file_id = file_id
class FileTypes(Extension):
def __init__(self):
self.db = None
def __create_table(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, SqliteDatabase):
import sqlite3.dbapi2
try:
cursor.execute("CREATE TABLE file_types (" +
"id integer primary key," +
"file_id integer," +
"type varchar" +
")")
except sqlite3.dbapi2.OperationalError:
cursor.close()
raise TableAlreadyExists
except:
raise
elif isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("CREATE TABLE file_types (" +
"id INT primary key," +
"file_id integer REFERENCES files(id)," +
"type mediumtext" +
") CHARACTER SET=utf8")
except MySQLdb.OperationalError, e:
if e.args[0] == 1050:
cursor.close()
raise TableAlreadyExists
raise
except:
raise
cnn.commit()
cursor.close()
def __create_indices(self, cnn):
cursor = cnn.cursor()
if isinstance(self.db, MysqlDatabase):
import MySQLdb
try:
cursor.execute("create index parent_id on file_links(parent_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
cursor.close()
raise
try:
cursor.execute("create index repository_id on files(repository_id)")
except MySQLdb.OperationalError, e:
if e.args[0] != 1061:
cursor.close()
raise
cursor.close()
def __get_files_for_repository(self, repo_id, cursor):
query = "SELECT ft.file_id from file_types ft, files f " + \
"WHERE f.id = ft.file_id and f.repository_id = ?"
cursor.execute(statement(query, self.db.place_holder), (repo_id,))
files = [res[0] for res in cursor.fetchall()]
return files
def run(self, repo, uri, db):
self.db = db
path = uri_to_filename(uri)
if path is not None:
repo_uri = repo.get_uri_for_path(path)
else:
repo_uri = uri
|
cursor = cnn.cursor()
cursor.execute(statement("SELECT id from repositories where uri = ?",
db.place_holder), (repo_uri,))
repo_id = cursor.fetchone()[0]
files = []
try:
self.__create_table(cnn)
except TableAlreadyExists:
cursor.execute(statement("SELECT max(id) from file_types",
db.place_holder))
id = cursor.fetchone()[0]
if id is not None:
DBFileType.id_counter = id + 1
files = self.__get_files_for_repository(repo_id, cursor)
except Exception, e:
raise ExtensionRunError(str(e))
self.__create_indices(cnn)
query = """select distinct f.id fid, f.file_name fname
from files f
where f.repository_id = ?
and not exists (select id from file_links where parent_id = f.id)"""
cursor.execute(statement(query, db.place_holder), (repo_id,))
write_cursor = cnn.cursor()
rs = cursor.fetchmany()
while rs:
types = []
for file_id, file_name in rs:
if file_id in files:
continue
type = guess_file_type(file_name)
types.append(DBFileType(None, type, file_id))
if types:
file_types = [(type.id, type.file_id, type.type) \
for type in types]
write_cursor.executemany(statement(DBFileType.__insert__,
self.db.place_holder),
file_types)
rs = cursor.fetchmany()
cnn.commit()
write_cursor.close()
cursor.close()
cnn.close()
def backout(self, repo, uri, db):
update_statement = """delete from file_types where
file_id in (select id from files f
where f.repository_id = ?)"""
self._do_backout(repo, uri, db, update_statement)
register_extension("FileTypes", FileTypes) | cnn = self.db.connect() | random_line_split |
migrate.ts | import { json5Require, makeFile } from './file-util'
import * as fs from 'fs'
import * as path from 'path'
async function migrate(): Promise<void> {
const filePath: string = path.resolve(process.cwd(), 'jmockr.config.json')
if (!fs.existsSync(filePath)) {
console.error(`Can't find config file [${filePath}]`)
throw new Error(`Can't find config file [${filePath}]`)
}
try {
const json: any = json5Require(filePath)
json.templateType = 'freemarker'
json.templateRoot = json.ftlFilePath
const originDataPath: any = json.dataPath
if (!fs.existsSync(originDataPath.ajax)) {
throw new Error('ajax folder not found')
}
const commonAsyncDataPath: string = originDataPath.ajax + '_migrate-common'
const newRetCode200Folder: string = path.resolve(commonAsyncDataPath, 'retCode200')
if (!fs.existsSync(commonAsyncDataPath)) {
fs.mkdirSync(commonAsyncDataPath)
}
if (!fs.existsSync(newRetCode200Folder)) |
const retCode200URLs: string = fs.readFileSync(originDataPath.url200, { encoding: 'utf8' })
const newUrl200Path: string = path.resolve(newRetCode200Folder, `url.json5`)
await makeFile({
mode: 'w',
path: newUrl200Path,
content: retCode200URLs,
})
const newURL200DataPath: string = path.resolve(newRetCode200Folder, 'data.json5')
await makeFile({
mode: 'w',
path: newURL200DataPath,
content: JSON.stringify({ retCode: 200 }, null, 4),
})
json.dataPath = {
urlMap: originDataPath.urlMap,
commonSync: originDataPath.commonFtl,
commonAsync: commonAsyncDataPath,
pageSync: originDataPath.pageFtl,
pageAsync: originDataPath.ajax,
}
json.authConfig.casDomain = ''
delete json.ftlFilePath
const newContent: string = JSON.stringify(json, null, 4)
fs.writeFileSync(filePath, newContent)
console.info(`url200 file is moved to ${newRetCode200Folder}`)
process.exit(0)
} catch (e) {
throw e
}
}
export {
migrate,
}
| {
fs.mkdirSync(newRetCode200Folder)
} | conditional_block |
migrate.ts | import { json5Require, makeFile } from './file-util'
import * as fs from 'fs'
import * as path from 'path'
async function migrate(): Promise<void> |
export {
migrate,
}
| {
const filePath: string = path.resolve(process.cwd(), 'jmockr.config.json')
if (!fs.existsSync(filePath)) {
console.error(`Can't find config file [${filePath}]`)
throw new Error(`Can't find config file [${filePath}]`)
}
try {
const json: any = json5Require(filePath)
json.templateType = 'freemarker'
json.templateRoot = json.ftlFilePath
const originDataPath: any = json.dataPath
if (!fs.existsSync(originDataPath.ajax)) {
throw new Error('ajax folder not found')
}
const commonAsyncDataPath: string = originDataPath.ajax + '_migrate-common'
const newRetCode200Folder: string = path.resolve(commonAsyncDataPath, 'retCode200')
if (!fs.existsSync(commonAsyncDataPath)) {
fs.mkdirSync(commonAsyncDataPath)
}
if (!fs.existsSync(newRetCode200Folder)) {
fs.mkdirSync(newRetCode200Folder)
}
const retCode200URLs: string = fs.readFileSync(originDataPath.url200, { encoding: 'utf8' })
const newUrl200Path: string = path.resolve(newRetCode200Folder, `url.json5`)
await makeFile({
mode: 'w',
path: newUrl200Path,
content: retCode200URLs,
})
const newURL200DataPath: string = path.resolve(newRetCode200Folder, 'data.json5')
await makeFile({
mode: 'w',
path: newURL200DataPath,
content: JSON.stringify({ retCode: 200 }, null, 4),
})
json.dataPath = {
urlMap: originDataPath.urlMap,
commonSync: originDataPath.commonFtl,
commonAsync: commonAsyncDataPath,
pageSync: originDataPath.pageFtl,
pageAsync: originDataPath.ajax,
}
json.authConfig.casDomain = ''
delete json.ftlFilePath
const newContent: string = JSON.stringify(json, null, 4)
fs.writeFileSync(filePath, newContent)
console.info(`url200 file is moved to ${newRetCode200Folder}`)
process.exit(0)
} catch (e) {
throw e
}
} | identifier_body |
migrate.ts | import { json5Require, makeFile } from './file-util'
import * as fs from 'fs'
import * as path from 'path'
async function migrate(): Promise<void> {
const filePath: string = path.resolve(process.cwd(), 'jmockr.config.json')
if (!fs.existsSync(filePath)) {
console.error(`Can't find config file [${filePath}]`)
throw new Error(`Can't find config file [${filePath}]`)
}
try {
const json: any = json5Require(filePath)
json.templateType = 'freemarker'
json.templateRoot = json.ftlFilePath
const originDataPath: any = json.dataPath
if (!fs.existsSync(originDataPath.ajax)) {
throw new Error('ajax folder not found')
}
const commonAsyncDataPath: string = originDataPath.ajax + '_migrate-common'
const newRetCode200Folder: string = path.resolve(commonAsyncDataPath, 'retCode200')
if (!fs.existsSync(commonAsyncDataPath)) {
fs.mkdirSync(commonAsyncDataPath)
}
if (!fs.existsSync(newRetCode200Folder)) {
fs.mkdirSync(newRetCode200Folder)
}
const retCode200URLs: string = fs.readFileSync(originDataPath.url200, { encoding: 'utf8' })
const newUrl200Path: string = path.resolve(newRetCode200Folder, `url.json5`)
await makeFile({
mode: 'w',
path: newUrl200Path,
content: retCode200URLs,
})
const newURL200DataPath: string = path.resolve(newRetCode200Folder, 'data.json5')
await makeFile({ | })
json.dataPath = {
urlMap: originDataPath.urlMap,
commonSync: originDataPath.commonFtl,
commonAsync: commonAsyncDataPath,
pageSync: originDataPath.pageFtl,
pageAsync: originDataPath.ajax,
}
json.authConfig.casDomain = ''
delete json.ftlFilePath
const newContent: string = JSON.stringify(json, null, 4)
fs.writeFileSync(filePath, newContent)
console.info(`url200 file is moved to ${newRetCode200Folder}`)
process.exit(0)
} catch (e) {
throw e
}
}
export {
migrate,
} | mode: 'w',
path: newURL200DataPath,
content: JSON.stringify({ retCode: 200 }, null, 4), | random_line_split |
migrate.ts | import { json5Require, makeFile } from './file-util'
import * as fs from 'fs'
import * as path from 'path'
async function | (): Promise<void> {
const filePath: string = path.resolve(process.cwd(), 'jmockr.config.json')
if (!fs.existsSync(filePath)) {
console.error(`Can't find config file [${filePath}]`)
throw new Error(`Can't find config file [${filePath}]`)
}
try {
const json: any = json5Require(filePath)
json.templateType = 'freemarker'
json.templateRoot = json.ftlFilePath
const originDataPath: any = json.dataPath
if (!fs.existsSync(originDataPath.ajax)) {
throw new Error('ajax folder not found')
}
const commonAsyncDataPath: string = originDataPath.ajax + '_migrate-common'
const newRetCode200Folder: string = path.resolve(commonAsyncDataPath, 'retCode200')
if (!fs.existsSync(commonAsyncDataPath)) {
fs.mkdirSync(commonAsyncDataPath)
}
if (!fs.existsSync(newRetCode200Folder)) {
fs.mkdirSync(newRetCode200Folder)
}
const retCode200URLs: string = fs.readFileSync(originDataPath.url200, { encoding: 'utf8' })
const newUrl200Path: string = path.resolve(newRetCode200Folder, `url.json5`)
await makeFile({
mode: 'w',
path: newUrl200Path,
content: retCode200URLs,
})
const newURL200DataPath: string = path.resolve(newRetCode200Folder, 'data.json5')
await makeFile({
mode: 'w',
path: newURL200DataPath,
content: JSON.stringify({ retCode: 200 }, null, 4),
})
json.dataPath = {
urlMap: originDataPath.urlMap,
commonSync: originDataPath.commonFtl,
commonAsync: commonAsyncDataPath,
pageSync: originDataPath.pageFtl,
pageAsync: originDataPath.ajax,
}
json.authConfig.casDomain = ''
delete json.ftlFilePath
const newContent: string = JSON.stringify(json, null, 4)
fs.writeFileSync(filePath, newContent)
console.info(`url200 file is moved to ${newRetCode200Folder}`)
process.exit(0)
} catch (e) {
throw e
}
}
export {
migrate,
}
| migrate | identifier_name |
renderers.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.template import RequestContext
from rest_framework import renderers
class CMSPageRenderer(renderers.TemplateHTMLRenderer):
"""
Modified TemplateHTMLRenderer, which is able to render CMS pages containing the templatetag
`{% render_placeholder ... %}`, and which accept ordinary Python objects in their rendering
context.
The serialized data object, as available to other REST renderers, is explicitly added to the
context as ``data``. Therefore keep in mind that templates for REST's `TemplateHTMLRenderer`
are not compatible with this renderer.
"""
def | (self, data, accepted_media_type=None, context=None):
request = context['request']
response = context['response']
if response.exception:
template = self.get_exception_template(response)
else:
view = context['view']
template_names = self.get_template_names(response, view)
template = self.resolve_template(template_names)
context['paginator'] = view.paginator
# set edit_mode, so that otherwise invisible placeholders can be edited inline
context['edit_mode'] = request.current_page.publisher_is_draft
try:
# DRF >= 3.4.2
template_context = self.get_template_context(context, context)
except AttributeError:
# Fallback for DRF < 3.4.2
template_context = self.resolve_context({}, request, response)
template_context['data'] = data
return template.render(template_context, request=request)
| render | identifier_name |
renderers.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.template import RequestContext
from rest_framework import renderers
class CMSPageRenderer(renderers.TemplateHTMLRenderer):
"""
Modified TemplateHTMLRenderer, which is able to render CMS pages containing the templatetag
`{% render_placeholder ... %}`, and which accept ordinary Python objects in their rendering
context.
The serialized data object, as available to other REST renderers, is explicitly added to the
context as ``data``. Therefore keep in mind that templates for REST's `TemplateHTMLRenderer`
are not compatible with this renderer.
"""
def render(self, data, accepted_media_type=None, context=None):
request = context['request']
response = context['response']
if response.exception:
|
else:
view = context['view']
template_names = self.get_template_names(response, view)
template = self.resolve_template(template_names)
context['paginator'] = view.paginator
# set edit_mode, so that otherwise invisible placeholders can be edited inline
context['edit_mode'] = request.current_page.publisher_is_draft
try:
# DRF >= 3.4.2
template_context = self.get_template_context(context, context)
except AttributeError:
# Fallback for DRF < 3.4.2
template_context = self.resolve_context({}, request, response)
template_context['data'] = data
return template.render(template_context, request=request)
| template = self.get_exception_template(response) | conditional_block |
renderers.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.template import RequestContext |
class CMSPageRenderer(renderers.TemplateHTMLRenderer):
"""
Modified TemplateHTMLRenderer, which is able to render CMS pages containing the templatetag
`{% render_placeholder ... %}`, and which accept ordinary Python objects in their rendering
context.
The serialized data object, as available to other REST renderers, is explicitly added to the
context as ``data``. Therefore keep in mind that templates for REST's `TemplateHTMLRenderer`
are not compatible with this renderer.
"""
def render(self, data, accepted_media_type=None, context=None):
request = context['request']
response = context['response']
if response.exception:
template = self.get_exception_template(response)
else:
view = context['view']
template_names = self.get_template_names(response, view)
template = self.resolve_template(template_names)
context['paginator'] = view.paginator
# set edit_mode, so that otherwise invisible placeholders can be edited inline
context['edit_mode'] = request.current_page.publisher_is_draft
try:
# DRF >= 3.4.2
template_context = self.get_template_context(context, context)
except AttributeError:
# Fallback for DRF < 3.4.2
template_context = self.resolve_context({}, request, response)
template_context['data'] = data
return template.render(template_context, request=request) | from rest_framework import renderers
| random_line_split |
renderers.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.template import RequestContext
from rest_framework import renderers
class CMSPageRenderer(renderers.TemplateHTMLRenderer):
"""
Modified TemplateHTMLRenderer, which is able to render CMS pages containing the templatetag
`{% render_placeholder ... %}`, and which accept ordinary Python objects in their rendering
context.
The serialized data object, as available to other REST renderers, is explicitly added to the
context as ``data``. Therefore keep in mind that templates for REST's `TemplateHTMLRenderer`
are not compatible with this renderer.
"""
def render(self, data, accepted_media_type=None, context=None):
| request = context['request']
response = context['response']
if response.exception:
template = self.get_exception_template(response)
else:
view = context['view']
template_names = self.get_template_names(response, view)
template = self.resolve_template(template_names)
context['paginator'] = view.paginator
# set edit_mode, so that otherwise invisible placeholders can be edited inline
context['edit_mode'] = request.current_page.publisher_is_draft
try:
# DRF >= 3.4.2
template_context = self.get_template_context(context, context)
except AttributeError:
# Fallback for DRF < 3.4.2
template_context = self.resolve_context({}, request, response)
template_context['data'] = data
return template.render(template_context, request=request) | identifier_body |
|
hr_language.py | # -*- encoding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import tools
from openerp.osv import fields, orm
class hr_language(orm.Model):
_name = 'hr.language'
_columns = {
'name': fields.selection(tools.scan_languages(), 'Language', required=True),
'description': fields.char('Description', size=64, required=True, translate=True),
'employee_id': fields.many2one('hr.employee', 'Employee', required=True),
'read': fields.boolean('Read'), | 'speak': fields.boolean('Speak'),
}
_defaults = {
'read': True,
'write': True,
'speak': True,
}
class hr_employee(orm.Model):
_inherit = 'hr.employee'
_columns = {
'language_ids': fields.one2many('hr.language', 'employee_id', 'Languages'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | 'write': fields.boolean('Write'), | random_line_split |
hr_language.py | # -*- encoding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import tools
from openerp.osv import fields, orm
class hr_language(orm.Model):
_name = 'hr.language'
_columns = {
'name': fields.selection(tools.scan_languages(), 'Language', required=True),
'description': fields.char('Description', size=64, required=True, translate=True),
'employee_id': fields.many2one('hr.employee', 'Employee', required=True),
'read': fields.boolean('Read'),
'write': fields.boolean('Write'),
'speak': fields.boolean('Speak'),
}
_defaults = {
'read': True,
'write': True,
'speak': True,
}
class hr_employee(orm.Model):
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| _inherit = 'hr.employee'
_columns = {
'language_ids': fields.one2many('hr.language', 'employee_id', 'Languages'),
} | identifier_body |
hr_language.py | # -*- encoding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import tools
from openerp.osv import fields, orm
class hr_language(orm.Model):
_name = 'hr.language'
_columns = {
'name': fields.selection(tools.scan_languages(), 'Language', required=True),
'description': fields.char('Description', size=64, required=True, translate=True),
'employee_id': fields.many2one('hr.employee', 'Employee', required=True),
'read': fields.boolean('Read'),
'write': fields.boolean('Write'),
'speak': fields.boolean('Speak'),
}
_defaults = {
'read': True,
'write': True,
'speak': True,
}
class | (orm.Model):
_inherit = 'hr.employee'
_columns = {
'language_ids': fields.one2many('hr.language', 'employee_id', 'Languages'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| hr_employee | identifier_name |
livingsocial_spider.py | #! -*- coding: utf-8 -*-
"""
Web Scraper Project
Scrape data from a regularly updated website livingsocial.com and
save to a database (postgres).
Scrapy spider part - it actually performs scraping.
"""
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.contrib.loader import XPathItemLoader
from scrapy.contrib.loader.processor import Join, MapCompose
from scraper_app.items import LivingSocialDeal
class | (BaseSpider):
"""
Spider for regularly updated livingsocial.com site, San Francisco page
"""
name = "livingsocial"
allowed_domains = ["livingsocial.com"]
start_urls = ["https://www.livingsocial.com/cities/15-san-francisco"]
deals_list_xpath = '//li[@dealid]'
item_fields = {
'title': './/span[@itemscope]/meta[@itemprop="name"]/@content',
'link': './/a/@href',
'location': './/a/div[@class="deal-details"]/p[@class="location"]/text()',
'original_price': './/a/div[@class="deal-prices"]/div[@class="deal-strikethrough-price"]/div[@class="strikethrough-wrapper"]/text()',
'price': './/a/div[@class="deal-prices"]/div[@class="deal-price"]/text()',
'end_date': './/span[@itemscope]/meta[@itemprop="availabilityEnds"]/@content'
}
def parse(self, response):
"""
Default callback used by Scrapy to process downloaded responses
Testing contracts:
@url http://www.livingsocial.com/cities/15-san-francisco
@returns items 1
@scrapes title link
"""
selector = HtmlXPathSelector(response)
# iterate over deals
for deal in selector.xpath(self.deals_list_xpath):
loader = XPathItemLoader(LivingSocialDeal(), selector=deal)
# define processors
loader.default_input_processor = MapCompose(unicode.strip)
loader.default_output_processor = Join()
# iterate over fields and add xpaths to the loader
for field, xpath in self.item_fields.iteritems():
loader.add_xpath(field, xpath)
yield loader.load_item()
| LivingSocialSpider | identifier_name |
livingsocial_spider.py | #! -*- coding: utf-8 -*-
"""
Web Scraper Project
Scrape data from a regularly updated website livingsocial.com and
save to a database (postgres).
Scrapy spider part - it actually performs scraping.
"""
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.contrib.loader import XPathItemLoader
from scrapy.contrib.loader.processor import Join, MapCompose
from scraper_app.items import LivingSocialDeal
class LivingSocialSpider(BaseSpider):
"""
Spider for regularly updated livingsocial.com site, San Francisco page
"""
name = "livingsocial"
allowed_domains = ["livingsocial.com"]
start_urls = ["https://www.livingsocial.com/cities/15-san-francisco"]
deals_list_xpath = '//li[@dealid]'
item_fields = {
'title': './/span[@itemscope]/meta[@itemprop="name"]/@content',
'link': './/a/@href',
'location': './/a/div[@class="deal-details"]/p[@class="location"]/text()',
'original_price': './/a/div[@class="deal-prices"]/div[@class="deal-strikethrough-price"]/div[@class="strikethrough-wrapper"]/text()',
'price': './/a/div[@class="deal-prices"]/div[@class="deal-price"]/text()',
'end_date': './/span[@itemscope]/meta[@itemprop="availabilityEnds"]/@content'
}
def parse(self, response):
"""
Default callback used by Scrapy to process downloaded responses
Testing contracts:
@url http://www.livingsocial.com/cities/15-san-francisco
@returns items 1
@scrapes title link
"""
selector = HtmlXPathSelector(response) |
# iterate over deals
for deal in selector.xpath(self.deals_list_xpath):
loader = XPathItemLoader(LivingSocialDeal(), selector=deal)
# define processors
loader.default_input_processor = MapCompose(unicode.strip)
loader.default_output_processor = Join()
# iterate over fields and add xpaths to the loader
for field, xpath in self.item_fields.iteritems():
loader.add_xpath(field, xpath)
yield loader.load_item() | random_line_split |
|
livingsocial_spider.py | #! -*- coding: utf-8 -*-
"""
Web Scraper Project
Scrape data from a regularly updated website livingsocial.com and
save to a database (postgres).
Scrapy spider part - it actually performs scraping.
"""
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.contrib.loader import XPathItemLoader
from scrapy.contrib.loader.processor import Join, MapCompose
from scraper_app.items import LivingSocialDeal
class LivingSocialSpider(BaseSpider):
"""
Spider for regularly updated livingsocial.com site, San Francisco page
"""
name = "livingsocial"
allowed_domains = ["livingsocial.com"]
start_urls = ["https://www.livingsocial.com/cities/15-san-francisco"]
deals_list_xpath = '//li[@dealid]'
item_fields = {
'title': './/span[@itemscope]/meta[@itemprop="name"]/@content',
'link': './/a/@href',
'location': './/a/div[@class="deal-details"]/p[@class="location"]/text()',
'original_price': './/a/div[@class="deal-prices"]/div[@class="deal-strikethrough-price"]/div[@class="strikethrough-wrapper"]/text()',
'price': './/a/div[@class="deal-prices"]/div[@class="deal-price"]/text()',
'end_date': './/span[@itemscope]/meta[@itemprop="availabilityEnds"]/@content'
}
def parse(self, response):
"""
Default callback used by Scrapy to process downloaded responses
Testing contracts:
@url http://www.livingsocial.com/cities/15-san-francisco
@returns items 1
@scrapes title link
"""
selector = HtmlXPathSelector(response)
# iterate over deals
for deal in selector.xpath(self.deals_list_xpath):
loader = XPathItemLoader(LivingSocialDeal(), selector=deal)
# define processors
loader.default_input_processor = MapCompose(unicode.strip)
loader.default_output_processor = Join()
# iterate over fields and add xpaths to the loader
for field, xpath in self.item_fields.iteritems():
|
yield loader.load_item()
| loader.add_xpath(field, xpath) | conditional_block |
livingsocial_spider.py | #! -*- coding: utf-8 -*-
"""
Web Scraper Project
Scrape data from a regularly updated website livingsocial.com and
save to a database (postgres).
Scrapy spider part - it actually performs scraping.
"""
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.contrib.loader import XPathItemLoader
from scrapy.contrib.loader.processor import Join, MapCompose
from scraper_app.items import LivingSocialDeal
class LivingSocialSpider(BaseSpider):
| """
Spider for regularly updated livingsocial.com site, San Francisco page
"""
name = "livingsocial"
allowed_domains = ["livingsocial.com"]
start_urls = ["https://www.livingsocial.com/cities/15-san-francisco"]
deals_list_xpath = '//li[@dealid]'
item_fields = {
'title': './/span[@itemscope]/meta[@itemprop="name"]/@content',
'link': './/a/@href',
'location': './/a/div[@class="deal-details"]/p[@class="location"]/text()',
'original_price': './/a/div[@class="deal-prices"]/div[@class="deal-strikethrough-price"]/div[@class="strikethrough-wrapper"]/text()',
'price': './/a/div[@class="deal-prices"]/div[@class="deal-price"]/text()',
'end_date': './/span[@itemscope]/meta[@itemprop="availabilityEnds"]/@content'
}
def parse(self, response):
"""
Default callback used by Scrapy to process downloaded responses
Testing contracts:
@url http://www.livingsocial.com/cities/15-san-francisco
@returns items 1
@scrapes title link
"""
selector = HtmlXPathSelector(response)
# iterate over deals
for deal in selector.xpath(self.deals_list_xpath):
loader = XPathItemLoader(LivingSocialDeal(), selector=deal)
# define processors
loader.default_input_processor = MapCompose(unicode.strip)
loader.default_output_processor = Join()
# iterate over fields and add xpaths to the loader
for field, xpath in self.item_fields.iteritems():
loader.add_xpath(field, xpath)
yield loader.load_item() | identifier_body |
|
search.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import packaging.version
from elasticsearch_dsl import Date, Document, Float, Keyword, Text, analyzer
from warehouse.search.utils import doc_type
EmailAnalyzer = analyzer(
"email",
tokenizer="uax_url_email",
filter=["lowercase", "stop", "snowball"],
)
NameAnalyzer = analyzer(
"normalized_name",
tokenizer="lowercase",
filter=["lowercase", "word_delimiter"],
)
@doc_type
class Project(Document):
| name = Text()
normalized_name = Text(analyzer=NameAnalyzer)
version = Keyword(multi=True)
latest_version = Keyword()
summary = Text(analyzer="snowball")
description = Text(analyzer="snowball")
author = Text()
author_email = Text(analyzer=EmailAnalyzer)
maintainer = Text()
maintainer_email = Text(analyzer=EmailAnalyzer)
license = Text()
home_page = Keyword()
download_url = Keyword()
keywords = Text(analyzer="snowball")
platform = Keyword()
created = Date()
classifiers = Keyword(multi=True)
zscore = Float()
@classmethod
def from_db(cls, release):
obj = cls(meta={"id": release.normalized_name})
obj["name"] = release.name
obj["normalized_name"] = release.normalized_name
obj["version"] = sorted(
release.all_versions, key=lambda r: packaging.version.parse(r), reverse=True
)
obj["latest_version"] = release.latest_version
obj["summary"] = release.summary
obj["description"] = release.description
obj["author"] = release.author
obj["author_email"] = release.author_email
obj["maintainer"] = release.maintainer
obj["maintainer_email"] = release.maintainer_email
obj["home_page"] = release.home_page
obj["download_url"] = release.download_url
obj["keywords"] = release.keywords
obj["platform"] = release.platform
obj["created"] = release.created
obj["classifiers"] = release.classifiers
obj["zscore"] = release.zscore
return obj
class Index:
# make sure this class can match any index so it will always be used to
# deserialize data coming from elasticsearch.
name = "*" | identifier_body |
|
search.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import packaging.version
from elasticsearch_dsl import Date, Document, Float, Keyword, Text, analyzer
from warehouse.search.utils import doc_type
EmailAnalyzer = analyzer(
"email", | filter=["lowercase", "stop", "snowball"],
)
NameAnalyzer = analyzer(
"normalized_name",
tokenizer="lowercase",
filter=["lowercase", "word_delimiter"],
)
@doc_type
class Project(Document):
name = Text()
normalized_name = Text(analyzer=NameAnalyzer)
version = Keyword(multi=True)
latest_version = Keyword()
summary = Text(analyzer="snowball")
description = Text(analyzer="snowball")
author = Text()
author_email = Text(analyzer=EmailAnalyzer)
maintainer = Text()
maintainer_email = Text(analyzer=EmailAnalyzer)
license = Text()
home_page = Keyword()
download_url = Keyword()
keywords = Text(analyzer="snowball")
platform = Keyword()
created = Date()
classifiers = Keyword(multi=True)
zscore = Float()
@classmethod
def from_db(cls, release):
obj = cls(meta={"id": release.normalized_name})
obj["name"] = release.name
obj["normalized_name"] = release.normalized_name
obj["version"] = sorted(
release.all_versions, key=lambda r: packaging.version.parse(r), reverse=True
)
obj["latest_version"] = release.latest_version
obj["summary"] = release.summary
obj["description"] = release.description
obj["author"] = release.author
obj["author_email"] = release.author_email
obj["maintainer"] = release.maintainer
obj["maintainer_email"] = release.maintainer_email
obj["home_page"] = release.home_page
obj["download_url"] = release.download_url
obj["keywords"] = release.keywords
obj["platform"] = release.platform
obj["created"] = release.created
obj["classifiers"] = release.classifiers
obj["zscore"] = release.zscore
return obj
class Index:
# make sure this class can match any index so it will always be used to
# deserialize data coming from elasticsearch.
name = "*" | tokenizer="uax_url_email", | random_line_split |
search.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import packaging.version
from elasticsearch_dsl import Date, Document, Float, Keyword, Text, analyzer
from warehouse.search.utils import doc_type
EmailAnalyzer = analyzer(
"email",
tokenizer="uax_url_email",
filter=["lowercase", "stop", "snowball"],
)
NameAnalyzer = analyzer(
"normalized_name",
tokenizer="lowercase",
filter=["lowercase", "word_delimiter"],
)
@doc_type
class Project(Document):
name = Text()
normalized_name = Text(analyzer=NameAnalyzer)
version = Keyword(multi=True)
latest_version = Keyword()
summary = Text(analyzer="snowball")
description = Text(analyzer="snowball")
author = Text()
author_email = Text(analyzer=EmailAnalyzer)
maintainer = Text()
maintainer_email = Text(analyzer=EmailAnalyzer)
license = Text()
home_page = Keyword()
download_url = Keyword()
keywords = Text(analyzer="snowball")
platform = Keyword()
created = Date()
classifiers = Keyword(multi=True)
zscore = Float()
@classmethod
def from_db(cls, release):
obj = cls(meta={"id": release.normalized_name})
obj["name"] = release.name
obj["normalized_name"] = release.normalized_name
obj["version"] = sorted(
release.all_versions, key=lambda r: packaging.version.parse(r), reverse=True
)
obj["latest_version"] = release.latest_version
obj["summary"] = release.summary
obj["description"] = release.description
obj["author"] = release.author
obj["author_email"] = release.author_email
obj["maintainer"] = release.maintainer
obj["maintainer_email"] = release.maintainer_email
obj["home_page"] = release.home_page
obj["download_url"] = release.download_url
obj["keywords"] = release.keywords
obj["platform"] = release.platform
obj["created"] = release.created
obj["classifiers"] = release.classifiers
obj["zscore"] = release.zscore
return obj
class | :
# make sure this class can match any index so it will always be used to
# deserialize data coming from elasticsearch.
name = "*"
| Index | identifier_name |
primitives.rs | use super::defines::*;
//TODO: convert to macro with usage
//format!(indent!(5, "format:{}"), 6)
pub fn tabs(num: usize) -> String {
format!("{:1$}", "", TAB_SIZE * num)
}
pub fn format_block(prefix: &str, suffix: &str, body: &[String]) -> Vec<String> {
let mut v = Vec::new();
if !prefix.is_empty() {
v.push(prefix.into());
}
for s in body.iter() {
let s = format!("{}{}", TAB, s);
v.push(s);
}
if !suffix.is_empty() {
v.push(suffix.into());
}
v
}
pub fn format_block_one_line(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> String {
let mut s = format!("{}{}", prefix, outer_separator);
let mut first = true;
for s_ in body {
if first {
first = false;
s = s + s_;
} else {
s = s + inner_separator + s_;
}
}
s + outer_separator + suffix
}
pub fn format_block_smart(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> Vec<String> {
format_block_smart_width(
prefix,
suffix,
body,
outer_separator,
inner_separator,
MAX_TEXT_WIDTH,
)
}
pub fn format_block_smart_width(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
max_width: usize,
) -> Vec<String> {
let outer_len = prefix.len() + suffix.len() + 2 * outer_separator.len();
let mut inner_len = inner_separator.len() * (body.len() - 1);
//TODO: change to sum()
for s in body {
inner_len += s.len();
}
if (outer_len + inner_len) > max_width {
format_block(prefix, suffix, body)
} else {
let s = format_block_one_line(prefix, suffix, body, outer_separator, inner_separator);
vec![s]
}
}
pub fn comment_block(body: &[String]) -> Vec<String> |
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_tabs() {
assert_eq!(tabs(0), "");
assert_eq!(tabs(1), TAB);
assert_eq!(tabs(2), format!("{0}{0}", TAB));
}
#[test]
fn test_format_block() {
let body = vec!["0 => 1,".into(), "1 => 0,".into()];
let actual = format_block("match a {", "}", &body);
let expected = ["match a {", " 0 => 1,", " 1 => 0,", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_one_line_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 14);
let expected = ["unsafe { f() }"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_many_lines_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 13);
let expected = ["unsafe {", " f()", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_one_line_inner_separator() {
let body = vec!["a: &str".into(), "b: &str".into()];
let actual = format_block_smart("f(", ")", &body, "", ", ");
let expected = ["f(a: &str, b: &str)"];
assert_eq!(actual, expected);
}
#[test]
fn test_comment_block() {
let body = vec!["f(a,".into(), " b)".into()];
let actual = comment_block(&body);
let expected = ["//f(a,", "// b)"];
assert_eq!(actual, expected);
}
}
| {
body.iter().map(|s| format!("//{}", s)).collect()
} | identifier_body |
primitives.rs | use super::defines::*;
//TODO: convert to macro with usage
//format!(indent!(5, "format:{}"), 6)
pub fn tabs(num: usize) -> String {
format!("{:1$}", "", TAB_SIZE * num)
}
pub fn format_block(prefix: &str, suffix: &str, body: &[String]) -> Vec<String> {
let mut v = Vec::new();
if !prefix.is_empty() {
v.push(prefix.into());
}
for s in body.iter() {
let s = format!("{}{}", TAB, s);
v.push(s);
}
if !suffix.is_empty() {
v.push(suffix.into());
}
v
}
pub fn format_block_one_line(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> String {
let mut s = format!("{}{}", prefix, outer_separator);
let mut first = true;
for s_ in body {
if first {
first = false;
s = s + s_;
} else {
s = s + inner_separator + s_;
}
}
s + outer_separator + suffix
}
pub fn format_block_smart(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> Vec<String> {
format_block_smart_width(
prefix,
suffix,
body,
outer_separator,
inner_separator,
MAX_TEXT_WIDTH,
)
}
pub fn format_block_smart_width(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
max_width: usize,
) -> Vec<String> {
let outer_len = prefix.len() + suffix.len() + 2 * outer_separator.len();
let mut inner_len = inner_separator.len() * (body.len() - 1);
//TODO: change to sum()
for s in body {
inner_len += s.len();
}
if (outer_len + inner_len) > max_width {
format_block(prefix, suffix, body)
} else |
}
pub fn comment_block(body: &[String]) -> Vec<String> {
body.iter().map(|s| format!("//{}", s)).collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_tabs() {
assert_eq!(tabs(0), "");
assert_eq!(tabs(1), TAB);
assert_eq!(tabs(2), format!("{0}{0}", TAB));
}
#[test]
fn test_format_block() {
let body = vec!["0 => 1,".into(), "1 => 0,".into()];
let actual = format_block("match a {", "}", &body);
let expected = ["match a {", " 0 => 1,", " 1 => 0,", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_one_line_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 14);
let expected = ["unsafe { f() }"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_many_lines_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 13);
let expected = ["unsafe {", " f()", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_one_line_inner_separator() {
let body = vec!["a: &str".into(), "b: &str".into()];
let actual = format_block_smart("f(", ")", &body, "", ", ");
let expected = ["f(a: &str, b: &str)"];
assert_eq!(actual, expected);
}
#[test]
fn test_comment_block() {
let body = vec!["f(a,".into(), " b)".into()];
let actual = comment_block(&body);
let expected = ["//f(a,", "// b)"];
assert_eq!(actual, expected);
}
}
| {
let s = format_block_one_line(prefix, suffix, body, outer_separator, inner_separator);
vec![s]
} | conditional_block |
primitives.rs | use super::defines::*;
//TODO: convert to macro with usage
//format!(indent!(5, "format:{}"), 6)
pub fn tabs(num: usize) -> String {
format!("{:1$}", "", TAB_SIZE * num)
}
pub fn format_block(prefix: &str, suffix: &str, body: &[String]) -> Vec<String> {
let mut v = Vec::new();
if !prefix.is_empty() {
v.push(prefix.into());
} | v.push(s);
}
if !suffix.is_empty() {
v.push(suffix.into());
}
v
}
pub fn format_block_one_line(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> String {
let mut s = format!("{}{}", prefix, outer_separator);
let mut first = true;
for s_ in body {
if first {
first = false;
s = s + s_;
} else {
s = s + inner_separator + s_;
}
}
s + outer_separator + suffix
}
pub fn format_block_smart(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> Vec<String> {
format_block_smart_width(
prefix,
suffix,
body,
outer_separator,
inner_separator,
MAX_TEXT_WIDTH,
)
}
pub fn format_block_smart_width(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
max_width: usize,
) -> Vec<String> {
let outer_len = prefix.len() + suffix.len() + 2 * outer_separator.len();
let mut inner_len = inner_separator.len() * (body.len() - 1);
//TODO: change to sum()
for s in body {
inner_len += s.len();
}
if (outer_len + inner_len) > max_width {
format_block(prefix, suffix, body)
} else {
let s = format_block_one_line(prefix, suffix, body, outer_separator, inner_separator);
vec![s]
}
}
pub fn comment_block(body: &[String]) -> Vec<String> {
body.iter().map(|s| format!("//{}", s)).collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_tabs() {
assert_eq!(tabs(0), "");
assert_eq!(tabs(1), TAB);
assert_eq!(tabs(2), format!("{0}{0}", TAB));
}
#[test]
fn test_format_block() {
let body = vec!["0 => 1,".into(), "1 => 0,".into()];
let actual = format_block("match a {", "}", &body);
let expected = ["match a {", " 0 => 1,", " 1 => 0,", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_one_line_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 14);
let expected = ["unsafe { f() }"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_many_lines_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 13);
let expected = ["unsafe {", " f()", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_one_line_inner_separator() {
let body = vec!["a: &str".into(), "b: &str".into()];
let actual = format_block_smart("f(", ")", &body, "", ", ");
let expected = ["f(a: &str, b: &str)"];
assert_eq!(actual, expected);
}
#[test]
fn test_comment_block() {
let body = vec!["f(a,".into(), " b)".into()];
let actual = comment_block(&body);
let expected = ["//f(a,", "// b)"];
assert_eq!(actual, expected);
}
} | for s in body.iter() {
let s = format!("{}{}", TAB, s); | random_line_split |
primitives.rs | use super::defines::*;
//TODO: convert to macro with usage
//format!(indent!(5, "format:{}"), 6)
pub fn tabs(num: usize) -> String {
format!("{:1$}", "", TAB_SIZE * num)
}
pub fn format_block(prefix: &str, suffix: &str, body: &[String]) -> Vec<String> {
let mut v = Vec::new();
if !prefix.is_empty() {
v.push(prefix.into());
}
for s in body.iter() {
let s = format!("{}{}", TAB, s);
v.push(s);
}
if !suffix.is_empty() {
v.push(suffix.into());
}
v
}
pub fn format_block_one_line(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> String {
let mut s = format!("{}{}", prefix, outer_separator);
let mut first = true;
for s_ in body {
if first {
first = false;
s = s + s_;
} else {
s = s + inner_separator + s_;
}
}
s + outer_separator + suffix
}
pub fn format_block_smart(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
) -> Vec<String> {
format_block_smart_width(
prefix,
suffix,
body,
outer_separator,
inner_separator,
MAX_TEXT_WIDTH,
)
}
pub fn format_block_smart_width(
prefix: &str,
suffix: &str,
body: &[String],
outer_separator: &str,
inner_separator: &str,
max_width: usize,
) -> Vec<String> {
let outer_len = prefix.len() + suffix.len() + 2 * outer_separator.len();
let mut inner_len = inner_separator.len() * (body.len() - 1);
//TODO: change to sum()
for s in body {
inner_len += s.len();
}
if (outer_len + inner_len) > max_width {
format_block(prefix, suffix, body)
} else {
let s = format_block_one_line(prefix, suffix, body, outer_separator, inner_separator);
vec![s]
}
}
pub fn comment_block(body: &[String]) -> Vec<String> {
body.iter().map(|s| format!("//{}", s)).collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_tabs() {
assert_eq!(tabs(0), "");
assert_eq!(tabs(1), TAB);
assert_eq!(tabs(2), format!("{0}{0}", TAB));
}
#[test]
fn | () {
let body = vec!["0 => 1,".into(), "1 => 0,".into()];
let actual = format_block("match a {", "}", &body);
let expected = ["match a {", " 0 => 1,", " 1 => 0,", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_one_line_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 14);
let expected = ["unsafe { f() }"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_width_many_lines_outer_separator() {
let body = vec!["f()".into()];
let actual = format_block_smart_width("unsafe {", "}", &body, " ", "", 13);
let expected = ["unsafe {", " f()", "}"];
assert_eq!(actual, expected);
}
#[test]
fn test_format_block_smart_one_line_inner_separator() {
let body = vec!["a: &str".into(), "b: &str".into()];
let actual = format_block_smart("f(", ")", &body, "", ", ");
let expected = ["f(a: &str, b: &str)"];
assert_eq!(actual, expected);
}
#[test]
fn test_comment_block() {
let body = vec!["f(a,".into(), " b)".into()];
let actual = comment_block(&body);
let expected = ["//f(a,", "// b)"];
assert_eq!(actual, expected);
}
}
| test_format_block | identifier_name |
task.rs | // Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Basic multitasking interface.
use core::mem::size_of;
use core::intrinsics::abort;
use hal::cortex_m3::{sched, systick};
use hal::cortex_m3::sched::NoInterrupts;
use os::syscall::syscall;
use hal::stack;
/// Task takes one argument, which is u32.
pub type Task = fn(u32);
mod current_stack_offset {
/// Currently allocated stack memory, growing down, starting at __STACK_BASE.
static mut CurrentStackOffset: u32 = 0;
pub fn get() -> u32 {
unsafe { CurrentStackOffset }
}
pub fn set(val: u32) {
unsafe { CurrentStackOffset = val };
}
}
/// Bytes to reserve in privileged stack based on stack size at the time of task::setup() call.
static ReservedPivilegedStackSize: u32 = 256;
/// Maximum number of tasks.
static MaxTasksCount: uint = 4;
mod defined_tasks_count {
use core::intrinsics::abort;
/// Total defined tasks count.
static mut DefinedTasksCount: uint = 0;
pub fn get() -> uint {
unsafe { DefinedTasksCount }
}
pub fn increase() {
unsafe {
DefinedTasksCount += 1;
if DefinedTasksCount > super::MaxTasksCount {
abort();
}
}
}
}
pub enum Status {
Runnable,
Blocked
}
/// Task descriptor, provides task stack pointer.
pub struct TaskDescriptor {
pub stack_start: u32,
pub stack_end: u32,
pub status: Status
}
impl TaskDescriptor {
pub fn block(&mut self, _: NoInterrupts) {
self.status = Blocked;
sched::switch_context();
}
pub fn unblock(&mut self, _: &NoInterrupts) { self.status = Runnable; }
}
struct TasksCollection {
pub current_task: uint,
pub tasks: [TaskDescriptor, ..MaxTasksCount],
}
pub static mut Tasks: TasksCollection = TasksCollection {
current_task: 0,
tasks: [TaskDescriptor { stack_start: 0, stack_end: 0, status: Runnable }, ..MaxTasksCount]
};
impl TasksCollection {
pub fn current_task<'a>(&'a mut self) -> &'a mut TaskDescriptor {
&mut self.tasks[self.current_task]
}
fn next_task(&mut self) {
loop {
self.current_task += 1;
if self.current_task == defined_tasks_count::get() {
self.current_task = 0;
}
match self.current_task() {
&task if !task.valid() => {}
&TaskDescriptor {status: Runnable, ..} => break,
_ => {}
}
}
}
fn add_task(&mut self, t: TaskDescriptor) {
self.tasks[defined_tasks_count::get()] = t;
defined_tasks_count::increase();
}
}
/// Initialize and start task manager.
///
/// This function keeps main stack intact. It starts the task scheduler and
/// never returns.
///
/// t should point to initial task.
#[inline(never)]
pub fn setup(t: Task, stack_size: u32) {
systick::setup(::hal::cortex_m3::systick::CALIBRATED, true);
let current_stack = sched::get_current_stack_pointer();
// User tasks start at this current stack size + reserved size aligned by 4
// bytes.
let task_stack_base: u32 = (current_stack as u32 - ReservedPivilegedStackSize) & !3;
current_stack_offset::set(task_stack_base);
let td = define_task(t, 0, stack_size, true);
td.load();
systick::enable();
sched::switch_context();
unsafe { abort() };
}
#[inline(never)]
pub fn define_task(t: Task, arg: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
systick::disable_irq();
let task_base = current_stack_offset::get();
let task_stack_size: u32 = (
stack_size +
8*4 + // hw saved regs
8*4 + // sw saved regs
8*4 // scratch pad for __morestack failure. see note on morestack below.
) & !0b1111;
current_stack_offset::set(task_base - task_stack_size);
let td = TaskDescriptor::new(t, arg, task_base, stack_size, initial);
unsafe { Tasks.add_task(td) };
systick::enable_irq();
td
}
impl TaskDescriptor {
/// Creates a new TaskDescriptor for given task, arg and stack base.
///
/// This function initializes task stack with hw saved registers.
#[inline(never)]
pub fn new(t: Task, arg: u32, stack_base: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
let state = sched::SavedState::new(t, arg);
let mut stack_top: u32 = stack_base - size_of::<sched::SavedState>() as u32;
unsafe { *(stack_top as *mut sched::SavedState) = state };
if !initial {
stack_top -= 8*4;
}
TaskDescriptor { | stack_start: stack_top,
stack_end: stack_base - stack_size,
status: Runnable,
}
}
pub fn load(&self) {
sched::set_task_stack_pointer(self.stack_start);
stack::set_stack_limit(self.stack_end);
}
pub fn save(&mut self) {
self.stack_start = sched::get_task_stack_pointer();
}
pub fn valid(&self) -> bool {
self.stack_end != 0
}
pub fn invalidate(&mut self) {
self.stack_end = 0;
}
}
#[inline(always)]
pub unsafe fn task_scheduler() {
stack::set_stack_limit(stack::stack_base() - ReservedPivilegedStackSize);
Tasks.current_task().save();
Tasks.next_task();
Tasks.current_task().load();
}
// TODO(farcaller): this should not actually use stack!
// At the time of the call of syscall(), the stack is overflown by 4, we still
// have 12 bytes in reserve and 2*8*4 to save the frame in pendsv after kill.
#[no_split_stack]
pub fn morestack() {
let psp = sched::get_task_stack_pointer();
let sp = sched::get_current_stack_pointer();
if psp == sp {
unsafe { syscall(kill_current_task, 0) };
} else {
unsafe { abort() };
}
}
#[inline(never)]
#[no_mangle]
#[no_split_stack]
pub fn kill_current_task(_: u32) {
unsafe { Tasks.current_task().invalidate() };
sched::switch_context();
} | random_line_split |
|
task.rs | // Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Basic multitasking interface.
use core::mem::size_of;
use core::intrinsics::abort;
use hal::cortex_m3::{sched, systick};
use hal::cortex_m3::sched::NoInterrupts;
use os::syscall::syscall;
use hal::stack;
/// Task takes one argument, which is u32.
pub type Task = fn(u32);
mod current_stack_offset {
/// Currently allocated stack memory, growing down, starting at __STACK_BASE.
static mut CurrentStackOffset: u32 = 0;
pub fn get() -> u32 {
unsafe { CurrentStackOffset }
}
pub fn set(val: u32) {
unsafe { CurrentStackOffset = val };
}
}
/// Bytes to reserve in privileged stack based on stack size at the time of task::setup() call.
static ReservedPivilegedStackSize: u32 = 256;
/// Maximum number of tasks.
static MaxTasksCount: uint = 4;
mod defined_tasks_count {
use core::intrinsics::abort;
/// Total defined tasks count.
static mut DefinedTasksCount: uint = 0;
pub fn get() -> uint {
unsafe { DefinedTasksCount }
}
pub fn increase() {
unsafe {
DefinedTasksCount += 1;
if DefinedTasksCount > super::MaxTasksCount {
abort();
}
}
}
}
pub enum Status {
Runnable,
Blocked
}
/// Task descriptor, provides task stack pointer.
pub struct TaskDescriptor {
pub stack_start: u32,
pub stack_end: u32,
pub status: Status
}
impl TaskDescriptor {
pub fn block(&mut self, _: NoInterrupts) {
self.status = Blocked;
sched::switch_context();
}
pub fn unblock(&mut self, _: &NoInterrupts) { self.status = Runnable; }
}
struct TasksCollection {
pub current_task: uint,
pub tasks: [TaskDescriptor, ..MaxTasksCount],
}
pub static mut Tasks: TasksCollection = TasksCollection {
current_task: 0,
tasks: [TaskDescriptor { stack_start: 0, stack_end: 0, status: Runnable }, ..MaxTasksCount]
};
impl TasksCollection {
pub fn current_task<'a>(&'a mut self) -> &'a mut TaskDescriptor {
&mut self.tasks[self.current_task]
}
fn next_task(&mut self) {
loop {
self.current_task += 1;
if self.current_task == defined_tasks_count::get() {
self.current_task = 0;
}
match self.current_task() {
&task if !task.valid() => {}
&TaskDescriptor {status: Runnable, ..} => break,
_ => {}
}
}
}
fn add_task(&mut self, t: TaskDescriptor) {
self.tasks[defined_tasks_count::get()] = t;
defined_tasks_count::increase();
}
}
/// Initialize and start task manager.
///
/// This function keeps main stack intact. It starts the task scheduler and
/// never returns.
///
/// t should point to initial task.
#[inline(never)]
pub fn setup(t: Task, stack_size: u32) {
systick::setup(::hal::cortex_m3::systick::CALIBRATED, true);
let current_stack = sched::get_current_stack_pointer();
// User tasks start at this current stack size + reserved size aligned by 4
// bytes.
let task_stack_base: u32 = (current_stack as u32 - ReservedPivilegedStackSize) & !3;
current_stack_offset::set(task_stack_base);
let td = define_task(t, 0, stack_size, true);
td.load();
systick::enable();
sched::switch_context();
unsafe { abort() };
}
#[inline(never)]
pub fn define_task(t: Task, arg: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
systick::disable_irq();
let task_base = current_stack_offset::get();
let task_stack_size: u32 = (
stack_size +
8*4 + // hw saved regs
8*4 + // sw saved regs
8*4 // scratch pad for __morestack failure. see note on morestack below.
) & !0b1111;
current_stack_offset::set(task_base - task_stack_size);
let td = TaskDescriptor::new(t, arg, task_base, stack_size, initial);
unsafe { Tasks.add_task(td) };
systick::enable_irq();
td
}
impl TaskDescriptor {
/// Creates a new TaskDescriptor for given task, arg and stack base.
///
/// This function initializes task stack with hw saved registers.
#[inline(never)]
pub fn new(t: Task, arg: u32, stack_base: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
let state = sched::SavedState::new(t, arg);
let mut stack_top: u32 = stack_base - size_of::<sched::SavedState>() as u32;
unsafe { *(stack_top as *mut sched::SavedState) = state };
if !initial {
stack_top -= 8*4;
}
TaskDescriptor {
stack_start: stack_top,
stack_end: stack_base - stack_size,
status: Runnable,
}
}
pub fn load(&self) {
sched::set_task_stack_pointer(self.stack_start);
stack::set_stack_limit(self.stack_end);
}
pub fn save(&mut self) {
self.stack_start = sched::get_task_stack_pointer();
}
pub fn valid(&self) -> bool {
self.stack_end != 0
}
pub fn | (&mut self) {
self.stack_end = 0;
}
}
#[inline(always)]
pub unsafe fn task_scheduler() {
stack::set_stack_limit(stack::stack_base() - ReservedPivilegedStackSize);
Tasks.current_task().save();
Tasks.next_task();
Tasks.current_task().load();
}
// TODO(farcaller): this should not actually use stack!
// At the time of the call of syscall(), the stack is overflown by 4, we still
// have 12 bytes in reserve and 2*8*4 to save the frame in pendsv after kill.
#[no_split_stack]
pub fn morestack() {
let psp = sched::get_task_stack_pointer();
let sp = sched::get_current_stack_pointer();
if psp == sp {
unsafe { syscall(kill_current_task, 0) };
} else {
unsafe { abort() };
}
}
#[inline(never)]
#[no_mangle]
#[no_split_stack]
pub fn kill_current_task(_: u32) {
unsafe { Tasks.current_task().invalidate() };
sched::switch_context();
}
| invalidate | identifier_name |
task.rs | // Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Basic multitasking interface.
use core::mem::size_of;
use core::intrinsics::abort;
use hal::cortex_m3::{sched, systick};
use hal::cortex_m3::sched::NoInterrupts;
use os::syscall::syscall;
use hal::stack;
/// Task takes one argument, which is u32.
pub type Task = fn(u32);
mod current_stack_offset {
/// Currently allocated stack memory, growing down, starting at __STACK_BASE.
static mut CurrentStackOffset: u32 = 0;
pub fn get() -> u32 |
pub fn set(val: u32) {
unsafe { CurrentStackOffset = val };
}
}
/// Bytes to reserve in privileged stack based on stack size at the time of task::setup() call.
static ReservedPivilegedStackSize: u32 = 256;
/// Maximum number of tasks.
static MaxTasksCount: uint = 4;
mod defined_tasks_count {
use core::intrinsics::abort;
/// Total defined tasks count.
static mut DefinedTasksCount: uint = 0;
pub fn get() -> uint {
unsafe { DefinedTasksCount }
}
pub fn increase() {
unsafe {
DefinedTasksCount += 1;
if DefinedTasksCount > super::MaxTasksCount {
abort();
}
}
}
}
pub enum Status {
Runnable,
Blocked
}
/// Task descriptor, provides task stack pointer.
pub struct TaskDescriptor {
pub stack_start: u32,
pub stack_end: u32,
pub status: Status
}
impl TaskDescriptor {
pub fn block(&mut self, _: NoInterrupts) {
self.status = Blocked;
sched::switch_context();
}
pub fn unblock(&mut self, _: &NoInterrupts) { self.status = Runnable; }
}
struct TasksCollection {
pub current_task: uint,
pub tasks: [TaskDescriptor, ..MaxTasksCount],
}
pub static mut Tasks: TasksCollection = TasksCollection {
current_task: 0,
tasks: [TaskDescriptor { stack_start: 0, stack_end: 0, status: Runnable }, ..MaxTasksCount]
};
impl TasksCollection {
pub fn current_task<'a>(&'a mut self) -> &'a mut TaskDescriptor {
&mut self.tasks[self.current_task]
}
fn next_task(&mut self) {
loop {
self.current_task += 1;
if self.current_task == defined_tasks_count::get() {
self.current_task = 0;
}
match self.current_task() {
&task if !task.valid() => {}
&TaskDescriptor {status: Runnable, ..} => break,
_ => {}
}
}
}
fn add_task(&mut self, t: TaskDescriptor) {
self.tasks[defined_tasks_count::get()] = t;
defined_tasks_count::increase();
}
}
/// Initialize and start task manager.
///
/// This function keeps main stack intact. It starts the task scheduler and
/// never returns.
///
/// t should point to initial task.
#[inline(never)]
pub fn setup(t: Task, stack_size: u32) {
systick::setup(::hal::cortex_m3::systick::CALIBRATED, true);
let current_stack = sched::get_current_stack_pointer();
// User tasks start at this current stack size + reserved size aligned by 4
// bytes.
let task_stack_base: u32 = (current_stack as u32 - ReservedPivilegedStackSize) & !3;
current_stack_offset::set(task_stack_base);
let td = define_task(t, 0, stack_size, true);
td.load();
systick::enable();
sched::switch_context();
unsafe { abort() };
}
#[inline(never)]
pub fn define_task(t: Task, arg: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
systick::disable_irq();
let task_base = current_stack_offset::get();
let task_stack_size: u32 = (
stack_size +
8*4 + // hw saved regs
8*4 + // sw saved regs
8*4 // scratch pad for __morestack failure. see note on morestack below.
) & !0b1111;
current_stack_offset::set(task_base - task_stack_size);
let td = TaskDescriptor::new(t, arg, task_base, stack_size, initial);
unsafe { Tasks.add_task(td) };
systick::enable_irq();
td
}
impl TaskDescriptor {
/// Creates a new TaskDescriptor for given task, arg and stack base.
///
/// This function initializes task stack with hw saved registers.
#[inline(never)]
pub fn new(t: Task, arg: u32, stack_base: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
let state = sched::SavedState::new(t, arg);
let mut stack_top: u32 = stack_base - size_of::<sched::SavedState>() as u32;
unsafe { *(stack_top as *mut sched::SavedState) = state };
if !initial {
stack_top -= 8*4;
}
TaskDescriptor {
stack_start: stack_top,
stack_end: stack_base - stack_size,
status: Runnable,
}
}
pub fn load(&self) {
sched::set_task_stack_pointer(self.stack_start);
stack::set_stack_limit(self.stack_end);
}
pub fn save(&mut self) {
self.stack_start = sched::get_task_stack_pointer();
}
pub fn valid(&self) -> bool {
self.stack_end != 0
}
pub fn invalidate(&mut self) {
self.stack_end = 0;
}
}
#[inline(always)]
pub unsafe fn task_scheduler() {
stack::set_stack_limit(stack::stack_base() - ReservedPivilegedStackSize);
Tasks.current_task().save();
Tasks.next_task();
Tasks.current_task().load();
}
// TODO(farcaller): this should not actually use stack!
// At the time of the call of syscall(), the stack is overflown by 4, we still
// have 12 bytes in reserve and 2*8*4 to save the frame in pendsv after kill.
#[no_split_stack]
pub fn morestack() {
let psp = sched::get_task_stack_pointer();
let sp = sched::get_current_stack_pointer();
if psp == sp {
unsafe { syscall(kill_current_task, 0) };
} else {
unsafe { abort() };
}
}
#[inline(never)]
#[no_mangle]
#[no_split_stack]
pub fn kill_current_task(_: u32) {
unsafe { Tasks.current_task().invalidate() };
sched::switch_context();
}
| {
unsafe { CurrentStackOffset }
} | identifier_body |
delegate.ts | import {
Cursor,
Dict,
ElementBuilder,
Environment,
Helper,
Option,
RenderResult,
} from '@glimmer/interfaces';
import { serializeBuilder } from '@glimmer/node';
import { createConstRef, Reference } from '@glimmer/reference';
import { ASTPluginBuilder, PrecompileOptions } from '@glimmer/syntax';
import { assign, castToSimple } from '@glimmer/util';
import createHTMLDocument from '@simple-dom/document';
import {
ElementNamespace,
SimpleDocument,
SimpleDocumentFragment,
SimpleElement,
SimpleNode,
SimpleText,
} from '@simple-dom/interface';
import { ComponentKind } from '../../components';
import { replaceHTML, toInnerHTML } from '../../dom/simple-utils';
import { UserHelper } from '../../helpers';
import { TestModifierConstructor } from '../../modifiers';
import RenderDelegate, { RenderDelegateOptions } from '../../render-delegate';
import { BaseEnv } from '../env';
import { JitDelegateContext, JitTestDelegateContext } from '../jit/delegate';
import {
registerComponent,
registerHelper,
registerInternalHelper,
registerModifier,
} from '../jit/register';
import { TestJitRegistry } from '../jit/registry';
import { renderTemplate } from '../jit/render';
import { TestJitRuntimeResolver } from '../jit/resolver';
import { debugRehydration, DebugRehydrationBuilder } from './builder';
export interface RehydrationStats {
clearedNodes: SimpleNode[];
}
export class RehydrationDelegate implements RenderDelegate {
static readonly isEager = false;
static readonly style = 'rehydration';
private plugins: ASTPluginBuilder[] = [];
public clientEnv: JitTestDelegateContext;
public serverEnv: JitTestDelegateContext;
private clientResolver: TestJitRuntimeResolver;
private serverResolver: TestJitRuntimeResolver;
protected clientRegistry: TestJitRegistry;
protected serverRegistry: TestJitRegistry;
public clientDoc: SimpleDocument;
public serverDoc: SimpleDocument;
public rehydrationStats!: RehydrationStats;
private self: Option<Reference> = null;
constructor(options?: RenderDelegateOptions) {
let delegate = assign(options?.env ?? {}, BaseEnv);
this.clientDoc = castToSimple(document);
this.clientRegistry = new TestJitRegistry();
this.clientResolver = new TestJitRuntimeResolver(this.clientRegistry);
this.clientEnv = JitDelegateContext(this.clientDoc, this.clientResolver, delegate);
this.serverDoc = createHTMLDocument();
this.serverRegistry = new TestJitRegistry();
this.serverResolver = new TestJitRuntimeResolver(this.serverRegistry);
this.serverEnv = JitDelegateContext(this.serverDoc, this.serverResolver, delegate);
}
getInitialElement(): SimpleElement {
return this.clientDoc.createElement('div');
}
createElement(tagName: string): SimpleElement {
return this.clientDoc.createElement(tagName);
}
createTextNode(content: string): SimpleText {
return this.clientDoc.createTextNode(content);
}
createElementNS(namespace: ElementNamespace, tagName: string): SimpleElement {
return this.clientDoc.createElementNS(namespace, tagName);
}
createDocumentFragment(): SimpleDocumentFragment {
return this.clientDoc.createDocumentFragment();
}
getElementBuilder(env: Environment, cursor: Cursor): ElementBuilder {
if (cursor.element instanceof Node) {
return debugRehydration(env, cursor);
}
return serializeBuilder(env, cursor);
}
| (
template: string,
context: Dict<unknown>,
takeSnapshot: () => void,
element: SimpleElement | undefined = undefined
): string {
element = element || this.serverDoc.createElement('div');
let cursor = { element, nextSibling: null };
let { env } = this.serverEnv.runtime;
// Emulate server-side render
renderTemplate(
template,
this.serverEnv,
this.getSelf(env, context),
this.getElementBuilder(env, cursor),
this.precompileOptions
);
takeSnapshot();
return this.serialize(element);
}
getSelf(_env: Environment, context: unknown): Reference {
if (!this.self) {
this.self = createConstRef(context, 'this');
}
return this.self;
}
serialize(element: SimpleElement): string {
return toInnerHTML(element);
}
renderClientSide(template: string, context: Dict<unknown>, element: SimpleElement): RenderResult {
let env = this.clientEnv.runtime.env;
this.self = null;
// Client-side rehydration
let cursor = { element, nextSibling: null };
let builder = this.getElementBuilder(env, cursor) as DebugRehydrationBuilder;
let result = renderTemplate(
template,
this.clientEnv,
this.getSelf(env, context),
builder,
this.precompileOptions
);
this.rehydrationStats = {
clearedNodes: builder['clearedNodes'],
};
return result;
}
renderTemplate(
template: string,
context: Dict<unknown>,
element: SimpleElement,
snapshot: () => void
): RenderResult {
let serialized = this.renderServerSide(template, context, snapshot);
replaceHTML(element, serialized);
qunitFixture().appendChild(element);
return this.renderClientSide(template, context, element);
}
registerPlugin(plugin: ASTPluginBuilder): void {
this.plugins.push(plugin);
}
registerComponent(type: ComponentKind, _testType: string, name: string, layout: string): void {
registerComponent(this.clientRegistry, type, name, layout);
registerComponent(this.serverRegistry, type, name, layout);
}
registerHelper(name: string, helper: UserHelper): void {
registerHelper(this.clientRegistry, name, helper);
registerHelper(this.serverRegistry, name, helper);
}
registerInternalHelper(name: string, helper: Helper) {
registerInternalHelper(this.clientRegistry, name, helper);
registerInternalHelper(this.serverRegistry, name, helper);
}
registerModifier(name: string, ModifierClass: TestModifierConstructor): void {
registerModifier(this.clientRegistry, name, ModifierClass);
registerModifier(this.serverRegistry, name, ModifierClass);
}
private get precompileOptions(): PrecompileOptions {
return {
plugins: {
ast: this.plugins,
},
};
}
}
export function qunitFixture(): SimpleElement {
return castToSimple(document.getElementById('qunit-fixture')!);
}
| renderServerSide | identifier_name |
delegate.ts | import {
Cursor,
Dict,
ElementBuilder,
Environment,
Helper,
Option,
RenderResult,
} from '@glimmer/interfaces';
import { serializeBuilder } from '@glimmer/node';
import { createConstRef, Reference } from '@glimmer/reference';
import { ASTPluginBuilder, PrecompileOptions } from '@glimmer/syntax';
import { assign, castToSimple } from '@glimmer/util';
import createHTMLDocument from '@simple-dom/document';
import {
ElementNamespace,
SimpleDocument,
SimpleDocumentFragment,
SimpleElement,
SimpleNode,
SimpleText,
} from '@simple-dom/interface';
import { ComponentKind } from '../../components';
import { replaceHTML, toInnerHTML } from '../../dom/simple-utils';
import { UserHelper } from '../../helpers';
import { TestModifierConstructor } from '../../modifiers';
import RenderDelegate, { RenderDelegateOptions } from '../../render-delegate';
import { BaseEnv } from '../env';
import { JitDelegateContext, JitTestDelegateContext } from '../jit/delegate';
import {
registerComponent,
registerHelper,
registerInternalHelper,
registerModifier,
} from '../jit/register';
import { TestJitRegistry } from '../jit/registry';
import { renderTemplate } from '../jit/render';
import { TestJitRuntimeResolver } from '../jit/resolver';
import { debugRehydration, DebugRehydrationBuilder } from './builder';
export interface RehydrationStats {
clearedNodes: SimpleNode[];
}
export class RehydrationDelegate implements RenderDelegate {
static readonly isEager = false;
static readonly style = 'rehydration';
private plugins: ASTPluginBuilder[] = [];
public clientEnv: JitTestDelegateContext;
public serverEnv: JitTestDelegateContext;
private clientResolver: TestJitRuntimeResolver;
private serverResolver: TestJitRuntimeResolver;
protected clientRegistry: TestJitRegistry;
protected serverRegistry: TestJitRegistry;
public clientDoc: SimpleDocument;
public serverDoc: SimpleDocument;
public rehydrationStats!: RehydrationStats;
private self: Option<Reference> = null;
constructor(options?: RenderDelegateOptions) {
let delegate = assign(options?.env ?? {}, BaseEnv);
this.clientDoc = castToSimple(document);
this.clientRegistry = new TestJitRegistry();
this.clientResolver = new TestJitRuntimeResolver(this.clientRegistry);
this.clientEnv = JitDelegateContext(this.clientDoc, this.clientResolver, delegate);
this.serverDoc = createHTMLDocument();
this.serverRegistry = new TestJitRegistry();
this.serverResolver = new TestJitRuntimeResolver(this.serverRegistry);
this.serverEnv = JitDelegateContext(this.serverDoc, this.serverResolver, delegate);
}
getInitialElement(): SimpleElement {
return this.clientDoc.createElement('div');
}
createElement(tagName: string): SimpleElement {
return this.clientDoc.createElement(tagName);
}
createTextNode(content: string): SimpleText {
return this.clientDoc.createTextNode(content);
}
createElementNS(namespace: ElementNamespace, tagName: string): SimpleElement {
return this.clientDoc.createElementNS(namespace, tagName);
}
createDocumentFragment(): SimpleDocumentFragment {
return this.clientDoc.createDocumentFragment();
}
getElementBuilder(env: Environment, cursor: Cursor): ElementBuilder {
if (cursor.element instanceof Node) {
return debugRehydration(env, cursor);
}
return serializeBuilder(env, cursor);
}
renderServerSide(
template: string,
context: Dict<unknown>,
takeSnapshot: () => void,
element: SimpleElement | undefined = undefined
): string {
element = element || this.serverDoc.createElement('div');
let cursor = { element, nextSibling: null };
let { env } = this.serverEnv.runtime;
// Emulate server-side render
renderTemplate(
template,
this.serverEnv,
this.getSelf(env, context),
this.getElementBuilder(env, cursor),
this.precompileOptions
);
takeSnapshot();
return this.serialize(element);
}
getSelf(_env: Environment, context: unknown): Reference {
if (!this.self) {
this.self = createConstRef(context, 'this');
}
return this.self;
}
serialize(element: SimpleElement): string {
return toInnerHTML(element);
}
renderClientSide(template: string, context: Dict<unknown>, element: SimpleElement): RenderResult |
renderTemplate(
template: string,
context: Dict<unknown>,
element: SimpleElement,
snapshot: () => void
): RenderResult {
let serialized = this.renderServerSide(template, context, snapshot);
replaceHTML(element, serialized);
qunitFixture().appendChild(element);
return this.renderClientSide(template, context, element);
}
registerPlugin(plugin: ASTPluginBuilder): void {
this.plugins.push(plugin);
}
registerComponent(type: ComponentKind, _testType: string, name: string, layout: string): void {
registerComponent(this.clientRegistry, type, name, layout);
registerComponent(this.serverRegistry, type, name, layout);
}
registerHelper(name: string, helper: UserHelper): void {
registerHelper(this.clientRegistry, name, helper);
registerHelper(this.serverRegistry, name, helper);
}
registerInternalHelper(name: string, helper: Helper) {
registerInternalHelper(this.clientRegistry, name, helper);
registerInternalHelper(this.serverRegistry, name, helper);
}
registerModifier(name: string, ModifierClass: TestModifierConstructor): void {
registerModifier(this.clientRegistry, name, ModifierClass);
registerModifier(this.serverRegistry, name, ModifierClass);
}
private get precompileOptions(): PrecompileOptions {
return {
plugins: {
ast: this.plugins,
},
};
}
}
export function qunitFixture(): SimpleElement {
return castToSimple(document.getElementById('qunit-fixture')!);
}
| {
let env = this.clientEnv.runtime.env;
this.self = null;
// Client-side rehydration
let cursor = { element, nextSibling: null };
let builder = this.getElementBuilder(env, cursor) as DebugRehydrationBuilder;
let result = renderTemplate(
template,
this.clientEnv,
this.getSelf(env, context),
builder,
this.precompileOptions
);
this.rehydrationStats = {
clearedNodes: builder['clearedNodes'],
};
return result;
} | identifier_body |
delegate.ts | import {
Cursor,
Dict,
ElementBuilder,
Environment,
Helper,
Option,
RenderResult,
} from '@glimmer/interfaces';
import { serializeBuilder } from '@glimmer/node';
import { createConstRef, Reference } from '@glimmer/reference';
import { ASTPluginBuilder, PrecompileOptions } from '@glimmer/syntax';
import { assign, castToSimple } from '@glimmer/util';
import createHTMLDocument from '@simple-dom/document';
import {
ElementNamespace,
SimpleDocument,
SimpleDocumentFragment,
SimpleElement,
SimpleNode,
SimpleText,
} from '@simple-dom/interface';
import { ComponentKind } from '../../components';
import { replaceHTML, toInnerHTML } from '../../dom/simple-utils';
import { UserHelper } from '../../helpers';
import { TestModifierConstructor } from '../../modifiers';
import RenderDelegate, { RenderDelegateOptions } from '../../render-delegate';
import { BaseEnv } from '../env';
import { JitDelegateContext, JitTestDelegateContext } from '../jit/delegate';
import {
registerComponent,
registerHelper,
registerInternalHelper,
registerModifier,
} from '../jit/register';
import { TestJitRegistry } from '../jit/registry';
import { renderTemplate } from '../jit/render';
import { TestJitRuntimeResolver } from '../jit/resolver';
import { debugRehydration, DebugRehydrationBuilder } from './builder';
export interface RehydrationStats {
clearedNodes: SimpleNode[];
}
export class RehydrationDelegate implements RenderDelegate {
static readonly isEager = false;
static readonly style = 'rehydration';
private plugins: ASTPluginBuilder[] = [];
public clientEnv: JitTestDelegateContext;
public serverEnv: JitTestDelegateContext;
private clientResolver: TestJitRuntimeResolver;
private serverResolver: TestJitRuntimeResolver;
protected clientRegistry: TestJitRegistry;
protected serverRegistry: TestJitRegistry;
public clientDoc: SimpleDocument;
public serverDoc: SimpleDocument;
public rehydrationStats!: RehydrationStats;
private self: Option<Reference> = null;
constructor(options?: RenderDelegateOptions) {
let delegate = assign(options?.env ?? {}, BaseEnv);
this.clientDoc = castToSimple(document);
this.clientRegistry = new TestJitRegistry();
this.clientResolver = new TestJitRuntimeResolver(this.clientRegistry);
this.clientEnv = JitDelegateContext(this.clientDoc, this.clientResolver, delegate);
this.serverDoc = createHTMLDocument();
this.serverRegistry = new TestJitRegistry();
this.serverResolver = new TestJitRuntimeResolver(this.serverRegistry);
this.serverEnv = JitDelegateContext(this.serverDoc, this.serverResolver, delegate);
}
getInitialElement(): SimpleElement {
return this.clientDoc.createElement('div');
}
createElement(tagName: string): SimpleElement {
return this.clientDoc.createElement(tagName);
}
createTextNode(content: string): SimpleText {
return this.clientDoc.createTextNode(content);
}
createElementNS(namespace: ElementNamespace, tagName: string): SimpleElement {
return this.clientDoc.createElementNS(namespace, tagName);
}
createDocumentFragment(): SimpleDocumentFragment {
return this.clientDoc.createDocumentFragment();
}
getElementBuilder(env: Environment, cursor: Cursor): ElementBuilder {
if (cursor.element instanceof Node) {
return debugRehydration(env, cursor);
}
return serializeBuilder(env, cursor);
}
renderServerSide(
template: string,
context: Dict<unknown>,
takeSnapshot: () => void,
element: SimpleElement | undefined = undefined
): string {
element = element || this.serverDoc.createElement('div');
let cursor = { element, nextSibling: null };
let { env } = this.serverEnv.runtime;
// Emulate server-side render
renderTemplate(
template,
this.serverEnv,
this.getSelf(env, context),
this.getElementBuilder(env, cursor),
this.precompileOptions
);
takeSnapshot();
return this.serialize(element);
}
getSelf(_env: Environment, context: unknown): Reference {
if (!this.self) {
this.self = createConstRef(context, 'this');
}
return this.self; |
renderClientSide(template: string, context: Dict<unknown>, element: SimpleElement): RenderResult {
let env = this.clientEnv.runtime.env;
this.self = null;
// Client-side rehydration
let cursor = { element, nextSibling: null };
let builder = this.getElementBuilder(env, cursor) as DebugRehydrationBuilder;
let result = renderTemplate(
template,
this.clientEnv,
this.getSelf(env, context),
builder,
this.precompileOptions
);
this.rehydrationStats = {
clearedNodes: builder['clearedNodes'],
};
return result;
}
renderTemplate(
template: string,
context: Dict<unknown>,
element: SimpleElement,
snapshot: () => void
): RenderResult {
let serialized = this.renderServerSide(template, context, snapshot);
replaceHTML(element, serialized);
qunitFixture().appendChild(element);
return this.renderClientSide(template, context, element);
}
registerPlugin(plugin: ASTPluginBuilder): void {
this.plugins.push(plugin);
}
registerComponent(type: ComponentKind, _testType: string, name: string, layout: string): void {
registerComponent(this.clientRegistry, type, name, layout);
registerComponent(this.serverRegistry, type, name, layout);
}
registerHelper(name: string, helper: UserHelper): void {
registerHelper(this.clientRegistry, name, helper);
registerHelper(this.serverRegistry, name, helper);
}
registerInternalHelper(name: string, helper: Helper) {
registerInternalHelper(this.clientRegistry, name, helper);
registerInternalHelper(this.serverRegistry, name, helper);
}
registerModifier(name: string, ModifierClass: TestModifierConstructor): void {
registerModifier(this.clientRegistry, name, ModifierClass);
registerModifier(this.serverRegistry, name, ModifierClass);
}
private get precompileOptions(): PrecompileOptions {
return {
plugins: {
ast: this.plugins,
},
};
}
}
export function qunitFixture(): SimpleElement {
return castToSimple(document.getElementById('qunit-fixture')!);
} | }
serialize(element: SimpleElement): string {
return toInnerHTML(element);
} | random_line_split |
delegate.ts | import {
Cursor,
Dict,
ElementBuilder,
Environment,
Helper,
Option,
RenderResult,
} from '@glimmer/interfaces';
import { serializeBuilder } from '@glimmer/node';
import { createConstRef, Reference } from '@glimmer/reference';
import { ASTPluginBuilder, PrecompileOptions } from '@glimmer/syntax';
import { assign, castToSimple } from '@glimmer/util';
import createHTMLDocument from '@simple-dom/document';
import {
ElementNamespace,
SimpleDocument,
SimpleDocumentFragment,
SimpleElement,
SimpleNode,
SimpleText,
} from '@simple-dom/interface';
import { ComponentKind } from '../../components';
import { replaceHTML, toInnerHTML } from '../../dom/simple-utils';
import { UserHelper } from '../../helpers';
import { TestModifierConstructor } from '../../modifiers';
import RenderDelegate, { RenderDelegateOptions } from '../../render-delegate';
import { BaseEnv } from '../env';
import { JitDelegateContext, JitTestDelegateContext } from '../jit/delegate';
import {
registerComponent,
registerHelper,
registerInternalHelper,
registerModifier,
} from '../jit/register';
import { TestJitRegistry } from '../jit/registry';
import { renderTemplate } from '../jit/render';
import { TestJitRuntimeResolver } from '../jit/resolver';
import { debugRehydration, DebugRehydrationBuilder } from './builder';
export interface RehydrationStats {
clearedNodes: SimpleNode[];
}
export class RehydrationDelegate implements RenderDelegate {
static readonly isEager = false;
static readonly style = 'rehydration';
private plugins: ASTPluginBuilder[] = [];
public clientEnv: JitTestDelegateContext;
public serverEnv: JitTestDelegateContext;
private clientResolver: TestJitRuntimeResolver;
private serverResolver: TestJitRuntimeResolver;
protected clientRegistry: TestJitRegistry;
protected serverRegistry: TestJitRegistry;
public clientDoc: SimpleDocument;
public serverDoc: SimpleDocument;
public rehydrationStats!: RehydrationStats;
private self: Option<Reference> = null;
constructor(options?: RenderDelegateOptions) {
let delegate = assign(options?.env ?? {}, BaseEnv);
this.clientDoc = castToSimple(document);
this.clientRegistry = new TestJitRegistry();
this.clientResolver = new TestJitRuntimeResolver(this.clientRegistry);
this.clientEnv = JitDelegateContext(this.clientDoc, this.clientResolver, delegate);
this.serverDoc = createHTMLDocument();
this.serverRegistry = new TestJitRegistry();
this.serverResolver = new TestJitRuntimeResolver(this.serverRegistry);
this.serverEnv = JitDelegateContext(this.serverDoc, this.serverResolver, delegate);
}
getInitialElement(): SimpleElement {
return this.clientDoc.createElement('div');
}
createElement(tagName: string): SimpleElement {
return this.clientDoc.createElement(tagName);
}
createTextNode(content: string): SimpleText {
return this.clientDoc.createTextNode(content);
}
createElementNS(namespace: ElementNamespace, tagName: string): SimpleElement {
return this.clientDoc.createElementNS(namespace, tagName);
}
createDocumentFragment(): SimpleDocumentFragment {
return this.clientDoc.createDocumentFragment();
}
getElementBuilder(env: Environment, cursor: Cursor): ElementBuilder {
if (cursor.element instanceof Node) |
return serializeBuilder(env, cursor);
}
renderServerSide(
template: string,
context: Dict<unknown>,
takeSnapshot: () => void,
element: SimpleElement | undefined = undefined
): string {
element = element || this.serverDoc.createElement('div');
let cursor = { element, nextSibling: null };
let { env } = this.serverEnv.runtime;
// Emulate server-side render
renderTemplate(
template,
this.serverEnv,
this.getSelf(env, context),
this.getElementBuilder(env, cursor),
this.precompileOptions
);
takeSnapshot();
return this.serialize(element);
}
getSelf(_env: Environment, context: unknown): Reference {
if (!this.self) {
this.self = createConstRef(context, 'this');
}
return this.self;
}
serialize(element: SimpleElement): string {
return toInnerHTML(element);
}
renderClientSide(template: string, context: Dict<unknown>, element: SimpleElement): RenderResult {
let env = this.clientEnv.runtime.env;
this.self = null;
// Client-side rehydration
let cursor = { element, nextSibling: null };
let builder = this.getElementBuilder(env, cursor) as DebugRehydrationBuilder;
let result = renderTemplate(
template,
this.clientEnv,
this.getSelf(env, context),
builder,
this.precompileOptions
);
this.rehydrationStats = {
clearedNodes: builder['clearedNodes'],
};
return result;
}
renderTemplate(
template: string,
context: Dict<unknown>,
element: SimpleElement,
snapshot: () => void
): RenderResult {
let serialized = this.renderServerSide(template, context, snapshot);
replaceHTML(element, serialized);
qunitFixture().appendChild(element);
return this.renderClientSide(template, context, element);
}
registerPlugin(plugin: ASTPluginBuilder): void {
this.plugins.push(plugin);
}
registerComponent(type: ComponentKind, _testType: string, name: string, layout: string): void {
registerComponent(this.clientRegistry, type, name, layout);
registerComponent(this.serverRegistry, type, name, layout);
}
registerHelper(name: string, helper: UserHelper): void {
registerHelper(this.clientRegistry, name, helper);
registerHelper(this.serverRegistry, name, helper);
}
registerInternalHelper(name: string, helper: Helper) {
registerInternalHelper(this.clientRegistry, name, helper);
registerInternalHelper(this.serverRegistry, name, helper);
}
registerModifier(name: string, ModifierClass: TestModifierConstructor): void {
registerModifier(this.clientRegistry, name, ModifierClass);
registerModifier(this.serverRegistry, name, ModifierClass);
}
private get precompileOptions(): PrecompileOptions {
return {
plugins: {
ast: this.plugins,
},
};
}
}
export function qunitFixture(): SimpleElement {
return castToSimple(document.getElementById('qunit-fixture')!);
}
| {
return debugRehydration(env, cursor);
} | conditional_block |
sfsetup.js | "use strict";
var async = require('async');
var fs = require('fs');
var util = require('util');
var prompt = require('prompt');
var httpRequest = require('emsoap').subsystems.httpRequest;
var common = require('./common');
var mms = require('./mms');
var mmscmd = require('./mmscmd');
var deploy = require('./deploy');
var session; // MMS session
var modelFile = "sfmodel.json";
var externalSystemType = 'NODEX';
var externalSystem;
var accessAddress;
var credentials;
var mappedObjects;
var verboseLoggingForExternalSystem;
function afterAuth(cb) {
// munge mappedObjects as required
for (var name in mappedObjects) {
var map = mappedObjects[name];
if (!map.typeBindingProperties) {
map.typeBindingProperties = {};
for (var propName in map) {
switch(propName) {
case "target":
case "properties":
;
default:
map.typeBindingProperties[name] = map[name];
}
}
}
}
// invoke op to create model
session.directive(
{
op : "INVOKE",
targetType: "CdmExternalSystem",
name: "invokeExternal",
params: {
externalSystem: externalSystem,
opName : "createSfModel",
params : {
sfVersion : credentials.sfVersion,
externalSystem : externalSystem,
typeDescs : mappedObjects
}
}
},
function (err, result) {
if (err) {
return cb(err);
}
fs.writeFileSync(modelFile, JSON.stringify(result.results, null, 2));
mmscmd.execFile(modelFile,session, deploy.outputWriter, cb);
}
);
}
exports.deployModel = function deployModel(externalSystemName,mmsSession,cb) {
session = mmsSession;
externalSystem = externalSystemName;
var text;
if(!session.creds.externalCredentials) {
console.log("Profile must include externalCredentials");
process.exit(1);
}
credentials = session.creds.externalCredentials[externalSystemName];
if(!credentials) {
console.log("Profile does not provide externalCredentials for " + externalSystemName);
process.exit(1);
}
if(!credentials.oauthKey || !credentials.oauthSecret) {
console.log("externalSystemName for " + externalSystemName + " must contain the oAuth key and secret.");
}
accessAddress = credentials.host;
try { | process.exit(1);
}
try {
mappedObjects = JSON.parse(text);
} catch(err) {
console.log('Error parsing JSON in salesforce.json:' + err);
process.exit(1);
}
if(mappedObjects._verbose_logging_) {
verboseLoggingForExternalSystem = mappedObjects._verbose_logging_;
}
delete mappedObjects._verbose_logging_;
createExternalSystem(function(err) {
if (err) {
return cb(err);
}
var addr = common.global.session.creds.server + "/oauth/" + externalSystem + "/authenticate";
if (common.global.argv.nonInteractive) {
console.log("Note: what follows will fail unless Emotive has been authorized at " + addr);
afterAuth(cb);
}
else {
console.log("Please navigate to " + addr.underline + " with your browser");
prompt.start();
prompt.colors = false;
prompt.message = 'Press Enter when done';
prompt.delimiter = '';
var props = {
properties: {
q: {
description : ":"
}
}
}
prompt.get(props, function (err, result) {
if (err) {
return cb(err);
}
afterAuth(cb);
});
}
});
}
function createExternalSystem(cb) {
if (!session.creds.username)
{
console.log("session.creds.username was null");
process.exit(1);
}
if(verboseLoggingForExternalSystem) console.log('VERBOSE LOGGING IS ON FOR ' + externalSystem);
session.directive({
op: 'INVOKE',
targetType: 'CdmExternalSystem',
name: "updateOAuthExternalSystem",
params: {
name: externalSystem,
typeName: externalSystemType,
"oauthCredentials" : {
"oauthType": "salesforce",
"oauthKey": credentials.oauthKey,
"oauthSecret": credentials.oauthSecret
},
properties: {
proxyConfiguration: {verbose: verboseLoggingForExternalSystem, sfVersion: credentials.sfVersion},
globalPackageName : "sfProxy"
}
}
},
cb);
} | text = fs.readFileSync("salesforce.json");
} catch(err) {
console.log('Error reading file salesforce.json:' + err); | random_line_split |
sfsetup.js | "use strict";
var async = require('async');
var fs = require('fs');
var util = require('util');
var prompt = require('prompt');
var httpRequest = require('emsoap').subsystems.httpRequest;
var common = require('./common');
var mms = require('./mms');
var mmscmd = require('./mmscmd');
var deploy = require('./deploy');
var session; // MMS session
var modelFile = "sfmodel.json";
var externalSystemType = 'NODEX';
var externalSystem;
var accessAddress;
var credentials;
var mappedObjects;
var verboseLoggingForExternalSystem;
function | (cb) {
// munge mappedObjects as required
for (var name in mappedObjects) {
var map = mappedObjects[name];
if (!map.typeBindingProperties) {
map.typeBindingProperties = {};
for (var propName in map) {
switch(propName) {
case "target":
case "properties":
;
default:
map.typeBindingProperties[name] = map[name];
}
}
}
}
// invoke op to create model
session.directive(
{
op : "INVOKE",
targetType: "CdmExternalSystem",
name: "invokeExternal",
params: {
externalSystem: externalSystem,
opName : "createSfModel",
params : {
sfVersion : credentials.sfVersion,
externalSystem : externalSystem,
typeDescs : mappedObjects
}
}
},
function (err, result) {
if (err) {
return cb(err);
}
fs.writeFileSync(modelFile, JSON.stringify(result.results, null, 2));
mmscmd.execFile(modelFile,session, deploy.outputWriter, cb);
}
);
}
exports.deployModel = function deployModel(externalSystemName,mmsSession,cb) {
session = mmsSession;
externalSystem = externalSystemName;
var text;
if(!session.creds.externalCredentials) {
console.log("Profile must include externalCredentials");
process.exit(1);
}
credentials = session.creds.externalCredentials[externalSystemName];
if(!credentials) {
console.log("Profile does not provide externalCredentials for " + externalSystemName);
process.exit(1);
}
if(!credentials.oauthKey || !credentials.oauthSecret) {
console.log("externalSystemName for " + externalSystemName + " must contain the oAuth key and secret.");
}
accessAddress = credentials.host;
try {
text = fs.readFileSync("salesforce.json");
} catch(err) {
console.log('Error reading file salesforce.json:' + err);
process.exit(1);
}
try {
mappedObjects = JSON.parse(text);
} catch(err) {
console.log('Error parsing JSON in salesforce.json:' + err);
process.exit(1);
}
if(mappedObjects._verbose_logging_) {
verboseLoggingForExternalSystem = mappedObjects._verbose_logging_;
}
delete mappedObjects._verbose_logging_;
createExternalSystem(function(err) {
if (err) {
return cb(err);
}
var addr = common.global.session.creds.server + "/oauth/" + externalSystem + "/authenticate";
if (common.global.argv.nonInteractive) {
console.log("Note: what follows will fail unless Emotive has been authorized at " + addr);
afterAuth(cb);
}
else {
console.log("Please navigate to " + addr.underline + " with your browser");
prompt.start();
prompt.colors = false;
prompt.message = 'Press Enter when done';
prompt.delimiter = '';
var props = {
properties: {
q: {
description : ":"
}
}
}
prompt.get(props, function (err, result) {
if (err) {
return cb(err);
}
afterAuth(cb);
});
}
});
}
function createExternalSystem(cb) {
if (!session.creds.username)
{
console.log("session.creds.username was null");
process.exit(1);
}
if(verboseLoggingForExternalSystem) console.log('VERBOSE LOGGING IS ON FOR ' + externalSystem);
session.directive({
op: 'INVOKE',
targetType: 'CdmExternalSystem',
name: "updateOAuthExternalSystem",
params: {
name: externalSystem,
typeName: externalSystemType,
"oauthCredentials" : {
"oauthType": "salesforce",
"oauthKey": credentials.oauthKey,
"oauthSecret": credentials.oauthSecret
},
properties: {
proxyConfiguration: {verbose: verboseLoggingForExternalSystem, sfVersion: credentials.sfVersion},
globalPackageName : "sfProxy"
}
}
},
cb);
}
| afterAuth | identifier_name |
sfsetup.js | "use strict";
var async = require('async');
var fs = require('fs');
var util = require('util');
var prompt = require('prompt');
var httpRequest = require('emsoap').subsystems.httpRequest;
var common = require('./common');
var mms = require('./mms');
var mmscmd = require('./mmscmd');
var deploy = require('./deploy');
var session; // MMS session
var modelFile = "sfmodel.json";
var externalSystemType = 'NODEX';
var externalSystem;
var accessAddress;
var credentials;
var mappedObjects;
var verboseLoggingForExternalSystem;
function afterAuth(cb) {
// munge mappedObjects as required
for (var name in mappedObjects) {
var map = mappedObjects[name];
if (!map.typeBindingProperties) {
map.typeBindingProperties = {};
for (var propName in map) {
switch(propName) {
case "target":
case "properties":
;
default:
map.typeBindingProperties[name] = map[name];
}
}
}
}
// invoke op to create model
session.directive(
{
op : "INVOKE",
targetType: "CdmExternalSystem",
name: "invokeExternal",
params: {
externalSystem: externalSystem,
opName : "createSfModel",
params : {
sfVersion : credentials.sfVersion,
externalSystem : externalSystem,
typeDescs : mappedObjects
}
}
},
function (err, result) {
if (err) {
return cb(err);
}
fs.writeFileSync(modelFile, JSON.stringify(result.results, null, 2));
mmscmd.execFile(modelFile,session, deploy.outputWriter, cb);
}
);
}
exports.deployModel = function deployModel(externalSystemName,mmsSession,cb) {
session = mmsSession;
externalSystem = externalSystemName;
var text;
if(!session.creds.externalCredentials) {
console.log("Profile must include externalCredentials");
process.exit(1);
}
credentials = session.creds.externalCredentials[externalSystemName];
if(!credentials) {
console.log("Profile does not provide externalCredentials for " + externalSystemName);
process.exit(1);
}
if(!credentials.oauthKey || !credentials.oauthSecret) {
console.log("externalSystemName for " + externalSystemName + " must contain the oAuth key and secret.");
}
accessAddress = credentials.host;
try {
text = fs.readFileSync("salesforce.json");
} catch(err) {
console.log('Error reading file salesforce.json:' + err);
process.exit(1);
}
try {
mappedObjects = JSON.parse(text);
} catch(err) {
console.log('Error parsing JSON in salesforce.json:' + err);
process.exit(1);
}
if(mappedObjects._verbose_logging_) {
verboseLoggingForExternalSystem = mappedObjects._verbose_logging_;
}
delete mappedObjects._verbose_logging_;
createExternalSystem(function(err) {
if (err) {
return cb(err);
}
var addr = common.global.session.creds.server + "/oauth/" + externalSystem + "/authenticate";
if (common.global.argv.nonInteractive) {
console.log("Note: what follows will fail unless Emotive has been authorized at " + addr);
afterAuth(cb);
}
else |
});
}
function createExternalSystem(cb) {
if (!session.creds.username)
{
console.log("session.creds.username was null");
process.exit(1);
}
if(verboseLoggingForExternalSystem) console.log('VERBOSE LOGGING IS ON FOR ' + externalSystem);
session.directive({
op: 'INVOKE',
targetType: 'CdmExternalSystem',
name: "updateOAuthExternalSystem",
params: {
name: externalSystem,
typeName: externalSystemType,
"oauthCredentials" : {
"oauthType": "salesforce",
"oauthKey": credentials.oauthKey,
"oauthSecret": credentials.oauthSecret
},
properties: {
proxyConfiguration: {verbose: verboseLoggingForExternalSystem, sfVersion: credentials.sfVersion},
globalPackageName : "sfProxy"
}
}
},
cb);
}
| {
console.log("Please navigate to " + addr.underline + " with your browser");
prompt.start();
prompt.colors = false;
prompt.message = 'Press Enter when done';
prompt.delimiter = '';
var props = {
properties: {
q: {
description : ":"
}
}
}
prompt.get(props, function (err, result) {
if (err) {
return cb(err);
}
afterAuth(cb);
});
} | conditional_block |
sfsetup.js | "use strict";
var async = require('async');
var fs = require('fs');
var util = require('util');
var prompt = require('prompt');
var httpRequest = require('emsoap').subsystems.httpRequest;
var common = require('./common');
var mms = require('./mms');
var mmscmd = require('./mmscmd');
var deploy = require('./deploy');
var session; // MMS session
var modelFile = "sfmodel.json";
var externalSystemType = 'NODEX';
var externalSystem;
var accessAddress;
var credentials;
var mappedObjects;
var verboseLoggingForExternalSystem;
function afterAuth(cb) |
exports.deployModel = function deployModel(externalSystemName,mmsSession,cb) {
session = mmsSession;
externalSystem = externalSystemName;
var text;
if(!session.creds.externalCredentials) {
console.log("Profile must include externalCredentials");
process.exit(1);
}
credentials = session.creds.externalCredentials[externalSystemName];
if(!credentials) {
console.log("Profile does not provide externalCredentials for " + externalSystemName);
process.exit(1);
}
if(!credentials.oauthKey || !credentials.oauthSecret) {
console.log("externalSystemName for " + externalSystemName + " must contain the oAuth key and secret.");
}
accessAddress = credentials.host;
try {
text = fs.readFileSync("salesforce.json");
} catch(err) {
console.log('Error reading file salesforce.json:' + err);
process.exit(1);
}
try {
mappedObjects = JSON.parse(text);
} catch(err) {
console.log('Error parsing JSON in salesforce.json:' + err);
process.exit(1);
}
if(mappedObjects._verbose_logging_) {
verboseLoggingForExternalSystem = mappedObjects._verbose_logging_;
}
delete mappedObjects._verbose_logging_;
createExternalSystem(function(err) {
if (err) {
return cb(err);
}
var addr = common.global.session.creds.server + "/oauth/" + externalSystem + "/authenticate";
if (common.global.argv.nonInteractive) {
console.log("Note: what follows will fail unless Emotive has been authorized at " + addr);
afterAuth(cb);
}
else {
console.log("Please navigate to " + addr.underline + " with your browser");
prompt.start();
prompt.colors = false;
prompt.message = 'Press Enter when done';
prompt.delimiter = '';
var props = {
properties: {
q: {
description : ":"
}
}
}
prompt.get(props, function (err, result) {
if (err) {
return cb(err);
}
afterAuth(cb);
});
}
});
}
function createExternalSystem(cb) {
if (!session.creds.username)
{
console.log("session.creds.username was null");
process.exit(1);
}
if(verboseLoggingForExternalSystem) console.log('VERBOSE LOGGING IS ON FOR ' + externalSystem);
session.directive({
op: 'INVOKE',
targetType: 'CdmExternalSystem',
name: "updateOAuthExternalSystem",
params: {
name: externalSystem,
typeName: externalSystemType,
"oauthCredentials" : {
"oauthType": "salesforce",
"oauthKey": credentials.oauthKey,
"oauthSecret": credentials.oauthSecret
},
properties: {
proxyConfiguration: {verbose: verboseLoggingForExternalSystem, sfVersion: credentials.sfVersion},
globalPackageName : "sfProxy"
}
}
},
cb);
}
| {
// munge mappedObjects as required
for (var name in mappedObjects) {
var map = mappedObjects[name];
if (!map.typeBindingProperties) {
map.typeBindingProperties = {};
for (var propName in map) {
switch(propName) {
case "target":
case "properties":
;
default:
map.typeBindingProperties[name] = map[name];
}
}
}
}
// invoke op to create model
session.directive(
{
op : "INVOKE",
targetType: "CdmExternalSystem",
name: "invokeExternal",
params: {
externalSystem: externalSystem,
opName : "createSfModel",
params : {
sfVersion : credentials.sfVersion,
externalSystem : externalSystem,
typeDescs : mappedObjects
}
}
},
function (err, result) {
if (err) {
return cb(err);
}
fs.writeFileSync(modelFile, JSON.stringify(result.results, null, 2));
mmscmd.execFile(modelFile,session, deploy.outputWriter, cb);
}
);
} | identifier_body |
extern-fail.rs | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-test linked failure
// error-pattern:explicit failure
// Testing that runtime failure doesn't cause callbacks to abort abnormally.
// Instead the failure will be delivered after the callbacks return.
extern crate libc;
use std::task;
mod rustrt {
extern crate libc;
extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
count(data - 1u) + count(data - 1u)
}
}
fn count(n: uint) -> uint {
unsafe {
task::deschedule();
rustrt::rust_dbg_call(cb, n)
}
}
fn main() {
for _ in range(0, 10u) {
task::spawn(proc() {
let result = count(5u);
println!("result = %?", result);
fail!();
});
}
} | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | random_line_split |
|
extern-fail.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-test linked failure
// error-pattern:explicit failure
// Testing that runtime failure doesn't cause callbacks to abort abnormally.
// Instead the failure will be delivered after the callbacks return.
extern crate libc;
use std::task;
mod rustrt {
extern crate libc;
extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
count(data - 1u) + count(data - 1u)
}
}
fn count(n: uint) -> uint {
unsafe {
task::deschedule();
rustrt::rust_dbg_call(cb, n)
}
}
fn main() | {
for _ in range(0, 10u) {
task::spawn(proc() {
let result = count(5u);
println!("result = %?", result);
fail!();
});
}
} | identifier_body |
|
extern-fail.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-test linked failure
// error-pattern:explicit failure
// Testing that runtime failure doesn't cause callbacks to abort abnormally.
// Instead the failure will be delivered after the callbacks return.
extern crate libc;
use std::task;
mod rustrt {
extern crate libc;
extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else |
}
fn count(n: uint) -> uint {
unsafe {
task::deschedule();
rustrt::rust_dbg_call(cb, n)
}
}
fn main() {
for _ in range(0, 10u) {
task::spawn(proc() {
let result = count(5u);
println!("result = %?", result);
fail!();
});
}
}
| {
count(data - 1u) + count(data - 1u)
} | conditional_block |
extern-fail.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-test linked failure
// error-pattern:explicit failure
// Testing that runtime failure doesn't cause callbacks to abort abnormally.
// Instead the failure will be delivered after the callbacks return.
extern crate libc;
use std::task;
mod rustrt {
extern crate libc;
extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
count(data - 1u) + count(data - 1u)
}
}
fn | (n: uint) -> uint {
unsafe {
task::deschedule();
rustrt::rust_dbg_call(cb, n)
}
}
fn main() {
for _ in range(0, 10u) {
task::spawn(proc() {
let result = count(5u);
println!("result = %?", result);
fail!();
});
}
}
| count | identifier_name |
validation.js | /**
* @copyright Copyright (c) 2014 X.commerce, Inc. (http://www.magentocommerce.com)
*/
/*jshint jquery:true*/
(function (factory) {
if (typeof define === 'function' && define.amd) {
define([
"jquery",
"mage/validation",
"mage/translate"
], factory);
} else |
}(function ($) {
"use strict";
/**
* Validation rule for grouped product, with multiple qty fields,
* only one qty needs to have a positive integer
*/
$.validator.addMethod(
"validate-grouped-qty",
function(value, element, params) {
var result = false;
var total = 0;
$(params).find('input[data-validate*="validate-grouped-qty"]').each(function(i, e) {
var val = $(e).val();
if (val && val.length > 0) {
result = true;
var valInt = parseInt(val, 10) || 0;
if (valInt >= 0) {
total += valInt;
} else {
result = false;
return result;
}
}
});
return result && total > 0;
},
'Please specify the quantity of product(s).'
);
$.validator.addMethod(
"validate-one-checkbox-required-by-name",
function(value, element, params) {
var checkedCount = 0;
if (element.type === 'checkbox') {
$('[name="' + element.name + '"]').each(function() {
if ($(this).is(':checked')) {
checkedCount += 1;
return false;
}
});
}
var container = '#' + params;
if (checkedCount > 0) {
$(container).removeClass('validation-failed');
$(container).addClass('validation-passed');
return true;
} else {
$(container).addClass('validation-failed');
$(container).removeClass('validation-passed');
return false;
}
},
'Please select one of the options.'
);
$.validator.addMethod(
"validate-date-between",
function(value, element, params) {
var minDate = new Date(params[0]),
maxDate = new Date(params[1]),
inputDate = new Date(element.value);
minDate.setHours(0);
maxDate.setHours(0);
if (inputDate >= minDate && inputDate <= maxDate) {
return true;
}
this.dateBetweenErrorMessage = $.mage.__('Please enter a date between %min and %max.').replace('%min', minDate).replace('%max', maxDate);
return false;
},
function(){
return this.dateBetweenErrorMessage;
}
);
}));
| {
factory(jQuery);
} | conditional_block |
validation.js | /**
* @copyright Copyright (c) 2014 X.commerce, Inc. (http://www.magentocommerce.com)
*/
/*jshint jquery:true*/
(function (factory) {
if (typeof define === 'function' && define.amd) {
define([
"jquery",
"mage/validation",
"mage/translate"
], factory);
} else {
factory(jQuery);
}
}(function ($) {
"use strict";
/**
* Validation rule for grouped product, with multiple qty fields,
* only one qty needs to have a positive integer
*/
$.validator.addMethod(
"validate-grouped-qty",
function(value, element, params) {
var result = false;
var total = 0;
$(params).find('input[data-validate*="validate-grouped-qty"]').each(function(i, e) {
var val = $(e).val();
if (val && val.length > 0) {
result = true;
var valInt = parseInt(val, 10) || 0;
if (valInt >= 0) {
total += valInt;
} else {
result = false;
return result;
}
}
});
return result && total > 0;
},
'Please specify the quantity of product(s).'
);
$.validator.addMethod(
"validate-one-checkbox-required-by-name",
function(value, element, params) {
var checkedCount = 0;
if (element.type === 'checkbox') {
$('[name="' + element.name + '"]').each(function() {
if ($(this).is(':checked')) {
checkedCount += 1;
return false;
}
});
}
var container = '#' + params; | } else {
$(container).addClass('validation-failed');
$(container).removeClass('validation-passed');
return false;
}
},
'Please select one of the options.'
);
$.validator.addMethod(
"validate-date-between",
function(value, element, params) {
var minDate = new Date(params[0]),
maxDate = new Date(params[1]),
inputDate = new Date(element.value);
minDate.setHours(0);
maxDate.setHours(0);
if (inputDate >= minDate && inputDate <= maxDate) {
return true;
}
this.dateBetweenErrorMessage = $.mage.__('Please enter a date between %min and %max.').replace('%min', minDate).replace('%max', maxDate);
return false;
},
function(){
return this.dateBetweenErrorMessage;
}
);
})); | if (checkedCount > 0) {
$(container).removeClass('validation-failed');
$(container).addClass('validation-passed');
return true; | random_line_split |
menuDirective.js | /**
* @ngdoc directive
* @name mdMenu
* @module material.components.menu
* @restrict E
* @description
*
* Menus are elements that open when clicked. They are useful for displaying
* additional options within the context of an action.
*
* Every `md-menu` must specify exactly two child elements. The first element is what is
* left in the DOM and is used to open the menu. This element is called the trigger element.
* The trigger element's scope has access to `$mdOpenMenu($event)`
* which it may call to open the menu. By passing $event as argument, the
* corresponding event is stopped from propagating up the DOM-tree.
*
* The second element is the `md-menu-content` element which represents the
* contents of the menu when it is open. Typically this will contain `md-menu-item`s,
* but you can do custom content as well.
*
* <hljs lang="html">
* <md-menu>
* <!-- Trigger element is a md-button with an icon -->
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open sample menu">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
* ## Sizing Menus
*
* The width of the menu when it is open may be specified by specifying a `width`
* attribute on the `md-menu-content` element.
* See the [Material Design Spec](http://www.google.com/design/spec/components/menus.html#menus-specs)
* for more information.
*
*
* ## Aligning Menus
*
* When a menu opens, it is important that the content aligns with the trigger element.
* Failure to align menus can result in jarring experiences for users as content
* suddenly shifts. To help with this, `md-menu` provides serveral APIs to help
* with alignment.
*
* ### Target Mode
*
* By default, `md-menu` will attempt to align the `md-menu-content` by aligning
* designated child elements in both the trigger and the menu content.
*
* To specify the alignment element in the `trigger` you can use the `md-menu-origin`
* attribute on a child element. If no `md-menu-origin` is specified, the `md-menu`
* will be used as the origin element.
*
* Similarly, the `md-menu-content` may specify a `md-menu-align-target` for a
* `md-menu-item` to specify the node that it should try and align with.
*
* In this example code, we specify an icon to be our origin element, and an
* icon in our menu content to be our alignment target. This ensures that both
* icons are aligned when the menu opens.
*
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open some menu">
* <md-icon md-menu-origin md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* Sometimes we want to specify alignment on the right side of an element, for example
* if we have a menu on the right side a toolbar, we want to right align our menu content.
*
* We can specify the origin by using the `md-position-mode` attribute on both
* the `x` and `y` axis. Right now only the `x-axis` has more than one option.
* You may specify the default mode of `target target` or
* `target-right target` to specify a right-oriented alignment target. See the
* position section of the demos for more examples.
*
* ### Menu Offsets
*
* It is sometimes unavoidable to need to have a deeper level of control for
* the positioning of a menu to ensure perfect alignment. `md-menu` provides
* the `md-offset` attribute to allow pixel level specificty of adjusting the
* exact positioning.
*
* This offset is provided in the format of `x y` or `n` where `n` will be used
* in both the `x` and `y` axis.
*
* For example, to move a menu by `2px` from the top, we can use:
* <hljs lang="html">
* <md-menu md-offset="2 0">
* <!-- menu-content -->
* </md-menu>
* </hljs>
*
* ### Auto Focus
* By default, when a menu opens, `md-menu` focuses the first button in the menu content.
*
* But sometimes you would like to focus another specific menu item instead of the first.<br/>
* This can be done by applying the `md-autofocus` directive on the given element.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button md-autofocus ng-click="doSomething()">
* Auto Focus
* </md-button>
* </md-menu-item>
* </hljs>
*
*
* ### Preventing close
*
* Sometimes you would like to be able to click on a menu item without having the menu
* close. To do this, ngMaterial exposes the `md-prevent-menu-close` attribute which
* can be added to a button inside a menu to stop the menu from automatically closing.
* You can then close the menu programatically by injecting `$mdMenu` and calling
* `$mdMenu.hide()`.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something" md-prevent-menu-close="md-prevent-menu-close">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </hljs>
*
* @usage
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* @param {string} md-position-mode The position mode in the form of
* `x`, `y`. Default value is `target`,`target`. Right now the `x` axis
* also suppports `target-right`.
* @param {string} md-offset An offset to apply to the dropdown after positioning
* `x`, `y`. Default value is `0`,`0`.
*
*/
angular
.module('material.components.menu')
.directive('mdMenu', MenuDirective);
/**
* @ngInject
*/
function MenuDirective($mdUtil) {
var INVALID_PREFIX = 'Invalid HTML for md-menu: ';
return {
restrict: 'E',
require: ['mdMenu', '?^mdMenuBar'],
controller: 'mdMenuCtrl', // empty function to be built by link
scope: true,
compile: compile
};
function compile(templateElement) { | triggerElement = triggerElement
.querySelector(prefixer.buildSelector(['ng-click', 'ng-mouseenter'])) || triggerElement;
}
if (triggerElement && (
triggerElement.nodeName == 'MD-BUTTON' ||
triggerElement.nodeName == 'BUTTON'
) && !triggerElement.hasAttribute('type')) {
triggerElement.setAttribute('type', 'button');
}
if (templateElement.children().length != 2) {
throw Error(INVALID_PREFIX + 'Expected two children elements.');
}
// Default element for ARIA attributes has the ngClick or ngMouseenter expression
triggerElement && triggerElement.setAttribute('aria-haspopup', 'true');
var nestedMenus = templateElement[0].querySelectorAll('md-menu');
var nestingDepth = parseInt(templateElement[0].getAttribute('md-nest-level'), 10) || 0;
if (nestedMenus) {
angular.forEach($mdUtil.nodesToArray(nestedMenus), function(menuEl) {
if (!menuEl.hasAttribute('md-position-mode')) {
menuEl.setAttribute('md-position-mode', 'cascade');
}
menuEl.classList.add('_md-nested-menu');
menuEl.setAttribute('md-nest-level', nestingDepth + 1);
});
}
return link;
}
function link(scope, element, attr, ctrls) {
var mdMenuCtrl = ctrls[0];
var isInMenuBar = ctrls[1] != undefined;
// Move everything into a md-menu-container and pass it to the controller
var menuContainer = angular.element( '<div class="_md md-open-menu-container md-whiteframe-z2"></div>');
var menuContents = element.children()[1];
element.addClass('_md'); // private md component indicator for styling
if (!menuContents.hasAttribute('role')) {
menuContents.setAttribute('role', 'menu');
}
menuContainer.append(menuContents);
element.on('$destroy', function() {
menuContainer.remove();
});
element.append(menuContainer);
menuContainer[0].style.display = 'none';
mdMenuCtrl.init(menuContainer, { isInMenuBar: isInMenuBar });
}
} | templateElement.addClass('md-menu');
var triggerElement = templateElement.children()[0];
var prefixer = $mdUtil.prefixer();
if (!prefixer.hasAttribute(triggerElement, 'ng-click')) { | random_line_split |
menuDirective.js | /**
* @ngdoc directive
* @name mdMenu
* @module material.components.menu
* @restrict E
* @description
*
* Menus are elements that open when clicked. They are useful for displaying
* additional options within the context of an action.
*
* Every `md-menu` must specify exactly two child elements. The first element is what is
* left in the DOM and is used to open the menu. This element is called the trigger element.
* The trigger element's scope has access to `$mdOpenMenu($event)`
* which it may call to open the menu. By passing $event as argument, the
* corresponding event is stopped from propagating up the DOM-tree.
*
* The second element is the `md-menu-content` element which represents the
* contents of the menu when it is open. Typically this will contain `md-menu-item`s,
* but you can do custom content as well.
*
* <hljs lang="html">
* <md-menu>
* <!-- Trigger element is a md-button with an icon -->
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open sample menu">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
* ## Sizing Menus
*
* The width of the menu when it is open may be specified by specifying a `width`
* attribute on the `md-menu-content` element.
* See the [Material Design Spec](http://www.google.com/design/spec/components/menus.html#menus-specs)
* for more information.
*
*
* ## Aligning Menus
*
* When a menu opens, it is important that the content aligns with the trigger element.
* Failure to align menus can result in jarring experiences for users as content
* suddenly shifts. To help with this, `md-menu` provides serveral APIs to help
* with alignment.
*
* ### Target Mode
*
* By default, `md-menu` will attempt to align the `md-menu-content` by aligning
* designated child elements in both the trigger and the menu content.
*
* To specify the alignment element in the `trigger` you can use the `md-menu-origin`
* attribute on a child element. If no `md-menu-origin` is specified, the `md-menu`
* will be used as the origin element.
*
* Similarly, the `md-menu-content` may specify a `md-menu-align-target` for a
* `md-menu-item` to specify the node that it should try and align with.
*
* In this example code, we specify an icon to be our origin element, and an
* icon in our menu content to be our alignment target. This ensures that both
* icons are aligned when the menu opens.
*
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open some menu">
* <md-icon md-menu-origin md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* Sometimes we want to specify alignment on the right side of an element, for example
* if we have a menu on the right side a toolbar, we want to right align our menu content.
*
* We can specify the origin by using the `md-position-mode` attribute on both
* the `x` and `y` axis. Right now only the `x-axis` has more than one option.
* You may specify the default mode of `target target` or
* `target-right target` to specify a right-oriented alignment target. See the
* position section of the demos for more examples.
*
* ### Menu Offsets
*
* It is sometimes unavoidable to need to have a deeper level of control for
* the positioning of a menu to ensure perfect alignment. `md-menu` provides
* the `md-offset` attribute to allow pixel level specificty of adjusting the
* exact positioning.
*
* This offset is provided in the format of `x y` or `n` where `n` will be used
* in both the `x` and `y` axis.
*
* For example, to move a menu by `2px` from the top, we can use:
* <hljs lang="html">
* <md-menu md-offset="2 0">
* <!-- menu-content -->
* </md-menu>
* </hljs>
*
* ### Auto Focus
* By default, when a menu opens, `md-menu` focuses the first button in the menu content.
*
* But sometimes you would like to focus another specific menu item instead of the first.<br/>
* This can be done by applying the `md-autofocus` directive on the given element.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button md-autofocus ng-click="doSomething()">
* Auto Focus
* </md-button>
* </md-menu-item>
* </hljs>
*
*
* ### Preventing close
*
* Sometimes you would like to be able to click on a menu item without having the menu
* close. To do this, ngMaterial exposes the `md-prevent-menu-close` attribute which
* can be added to a button inside a menu to stop the menu from automatically closing.
* You can then close the menu programatically by injecting `$mdMenu` and calling
* `$mdMenu.hide()`.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something" md-prevent-menu-close="md-prevent-menu-close">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </hljs>
*
* @usage
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* @param {string} md-position-mode The position mode in the form of
* `x`, `y`. Default value is `target`,`target`. Right now the `x` axis
* also suppports `target-right`.
* @param {string} md-offset An offset to apply to the dropdown after positioning
* `x`, `y`. Default value is `0`,`0`.
*
*/
angular
.module('material.components.menu')
.directive('mdMenu', MenuDirective);
/**
* @ngInject
*/
function MenuDirective($mdUtil) {
var INVALID_PREFIX = 'Invalid HTML for md-menu: ';
return {
restrict: 'E',
require: ['mdMenu', '?^mdMenuBar'],
controller: 'mdMenuCtrl', // empty function to be built by link
scope: true,
compile: compile
};
function compile(templateElement) {
templateElement.addClass('md-menu');
var triggerElement = templateElement.children()[0];
var prefixer = $mdUtil.prefixer();
if (!prefixer.hasAttribute(triggerElement, 'ng-click')) {
triggerElement = triggerElement
.querySelector(prefixer.buildSelector(['ng-click', 'ng-mouseenter'])) || triggerElement;
}
if (triggerElement && (
triggerElement.nodeName == 'MD-BUTTON' ||
triggerElement.nodeName == 'BUTTON'
) && !triggerElement.hasAttribute('type')) |
if (templateElement.children().length != 2) {
throw Error(INVALID_PREFIX + 'Expected two children elements.');
}
// Default element for ARIA attributes has the ngClick or ngMouseenter expression
triggerElement && triggerElement.setAttribute('aria-haspopup', 'true');
var nestedMenus = templateElement[0].querySelectorAll('md-menu');
var nestingDepth = parseInt(templateElement[0].getAttribute('md-nest-level'), 10) || 0;
if (nestedMenus) {
angular.forEach($mdUtil.nodesToArray(nestedMenus), function(menuEl) {
if (!menuEl.hasAttribute('md-position-mode')) {
menuEl.setAttribute('md-position-mode', 'cascade');
}
menuEl.classList.add('_md-nested-menu');
menuEl.setAttribute('md-nest-level', nestingDepth + 1);
});
}
return link;
}
function link(scope, element, attr, ctrls) {
var mdMenuCtrl = ctrls[0];
var isInMenuBar = ctrls[1] != undefined;
// Move everything into a md-menu-container and pass it to the controller
var menuContainer = angular.element( '<div class="_md md-open-menu-container md-whiteframe-z2"></div>');
var menuContents = element.children()[1];
element.addClass('_md'); // private md component indicator for styling
if (!menuContents.hasAttribute('role')) {
menuContents.setAttribute('role', 'menu');
}
menuContainer.append(menuContents);
element.on('$destroy', function() {
menuContainer.remove();
});
element.append(menuContainer);
menuContainer[0].style.display = 'none';
mdMenuCtrl.init(menuContainer, { isInMenuBar: isInMenuBar });
}
}
| {
triggerElement.setAttribute('type', 'button');
} | conditional_block |
menuDirective.js | /**
* @ngdoc directive
* @name mdMenu
* @module material.components.menu
* @restrict E
* @description
*
* Menus are elements that open when clicked. They are useful for displaying
* additional options within the context of an action.
*
* Every `md-menu` must specify exactly two child elements. The first element is what is
* left in the DOM and is used to open the menu. This element is called the trigger element.
* The trigger element's scope has access to `$mdOpenMenu($event)`
* which it may call to open the menu. By passing $event as argument, the
* corresponding event is stopped from propagating up the DOM-tree.
*
* The second element is the `md-menu-content` element which represents the
* contents of the menu when it is open. Typically this will contain `md-menu-item`s,
* but you can do custom content as well.
*
* <hljs lang="html">
* <md-menu>
* <!-- Trigger element is a md-button with an icon -->
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open sample menu">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
* ## Sizing Menus
*
* The width of the menu when it is open may be specified by specifying a `width`
* attribute on the `md-menu-content` element.
* See the [Material Design Spec](http://www.google.com/design/spec/components/menus.html#menus-specs)
* for more information.
*
*
* ## Aligning Menus
*
* When a menu opens, it is important that the content aligns with the trigger element.
* Failure to align menus can result in jarring experiences for users as content
* suddenly shifts. To help with this, `md-menu` provides serveral APIs to help
* with alignment.
*
* ### Target Mode
*
* By default, `md-menu` will attempt to align the `md-menu-content` by aligning
* designated child elements in both the trigger and the menu content.
*
* To specify the alignment element in the `trigger` you can use the `md-menu-origin`
* attribute on a child element. If no `md-menu-origin` is specified, the `md-menu`
* will be used as the origin element.
*
* Similarly, the `md-menu-content` may specify a `md-menu-align-target` for a
* `md-menu-item` to specify the node that it should try and align with.
*
* In this example code, we specify an icon to be our origin element, and an
* icon in our menu content to be our alignment target. This ensures that both
* icons are aligned when the menu opens.
*
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open some menu">
* <md-icon md-menu-origin md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* Sometimes we want to specify alignment on the right side of an element, for example
* if we have a menu on the right side a toolbar, we want to right align our menu content.
*
* We can specify the origin by using the `md-position-mode` attribute on both
* the `x` and `y` axis. Right now only the `x-axis` has more than one option.
* You may specify the default mode of `target target` or
* `target-right target` to specify a right-oriented alignment target. See the
* position section of the demos for more examples.
*
* ### Menu Offsets
*
* It is sometimes unavoidable to need to have a deeper level of control for
* the positioning of a menu to ensure perfect alignment. `md-menu` provides
* the `md-offset` attribute to allow pixel level specificty of adjusting the
* exact positioning.
*
* This offset is provided in the format of `x y` or `n` where `n` will be used
* in both the `x` and `y` axis.
*
* For example, to move a menu by `2px` from the top, we can use:
* <hljs lang="html">
* <md-menu md-offset="2 0">
* <!-- menu-content -->
* </md-menu>
* </hljs>
*
* ### Auto Focus
* By default, when a menu opens, `md-menu` focuses the first button in the menu content.
*
* But sometimes you would like to focus another specific menu item instead of the first.<br/>
* This can be done by applying the `md-autofocus` directive on the given element.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button md-autofocus ng-click="doSomething()">
* Auto Focus
* </md-button>
* </md-menu-item>
* </hljs>
*
*
* ### Preventing close
*
* Sometimes you would like to be able to click on a menu item without having the menu
* close. To do this, ngMaterial exposes the `md-prevent-menu-close` attribute which
* can be added to a button inside a menu to stop the menu from automatically closing.
* You can then close the menu programatically by injecting `$mdMenu` and calling
* `$mdMenu.hide()`.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something" md-prevent-menu-close="md-prevent-menu-close">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </hljs>
*
* @usage
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* @param {string} md-position-mode The position mode in the form of
* `x`, `y`. Default value is `target`,`target`. Right now the `x` axis
* also suppports `target-right`.
* @param {string} md-offset An offset to apply to the dropdown after positioning
* `x`, `y`. Default value is `0`,`0`.
*
*/
angular
.module('material.components.menu')
.directive('mdMenu', MenuDirective);
/**
* @ngInject
*/
function MenuDirective($mdUtil) {
var INVALID_PREFIX = 'Invalid HTML for md-menu: ';
return {
restrict: 'E',
require: ['mdMenu', '?^mdMenuBar'],
controller: 'mdMenuCtrl', // empty function to be built by link
scope: true,
compile: compile
};
function compile(templateElement) {
templateElement.addClass('md-menu');
var triggerElement = templateElement.children()[0];
var prefixer = $mdUtil.prefixer();
if (!prefixer.hasAttribute(triggerElement, 'ng-click')) {
triggerElement = triggerElement
.querySelector(prefixer.buildSelector(['ng-click', 'ng-mouseenter'])) || triggerElement;
}
if (triggerElement && (
triggerElement.nodeName == 'MD-BUTTON' ||
triggerElement.nodeName == 'BUTTON'
) && !triggerElement.hasAttribute('type')) {
triggerElement.setAttribute('type', 'button');
}
if (templateElement.children().length != 2) {
throw Error(INVALID_PREFIX + 'Expected two children elements.');
}
// Default element for ARIA attributes has the ngClick or ngMouseenter expression
triggerElement && triggerElement.setAttribute('aria-haspopup', 'true');
var nestedMenus = templateElement[0].querySelectorAll('md-menu');
var nestingDepth = parseInt(templateElement[0].getAttribute('md-nest-level'), 10) || 0;
if (nestedMenus) {
angular.forEach($mdUtil.nodesToArray(nestedMenus), function(menuEl) {
if (!menuEl.hasAttribute('md-position-mode')) {
menuEl.setAttribute('md-position-mode', 'cascade');
}
menuEl.classList.add('_md-nested-menu');
menuEl.setAttribute('md-nest-level', nestingDepth + 1);
});
}
return link;
}
function link(scope, element, attr, ctrls) |
}
| {
var mdMenuCtrl = ctrls[0];
var isInMenuBar = ctrls[1] != undefined;
// Move everything into a md-menu-container and pass it to the controller
var menuContainer = angular.element( '<div class="_md md-open-menu-container md-whiteframe-z2"></div>');
var menuContents = element.children()[1];
element.addClass('_md'); // private md component indicator for styling
if (!menuContents.hasAttribute('role')) {
menuContents.setAttribute('role', 'menu');
}
menuContainer.append(menuContents);
element.on('$destroy', function() {
menuContainer.remove();
});
element.append(menuContainer);
menuContainer[0].style.display = 'none';
mdMenuCtrl.init(menuContainer, { isInMenuBar: isInMenuBar });
} | identifier_body |
menuDirective.js | /**
* @ngdoc directive
* @name mdMenu
* @module material.components.menu
* @restrict E
* @description
*
* Menus are elements that open when clicked. They are useful for displaying
* additional options within the context of an action.
*
* Every `md-menu` must specify exactly two child elements. The first element is what is
* left in the DOM and is used to open the menu. This element is called the trigger element.
* The trigger element's scope has access to `$mdOpenMenu($event)`
* which it may call to open the menu. By passing $event as argument, the
* corresponding event is stopped from propagating up the DOM-tree.
*
* The second element is the `md-menu-content` element which represents the
* contents of the menu when it is open. Typically this will contain `md-menu-item`s,
* but you can do custom content as well.
*
* <hljs lang="html">
* <md-menu>
* <!-- Trigger element is a md-button with an icon -->
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open sample menu">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
* ## Sizing Menus
*
* The width of the menu when it is open may be specified by specifying a `width`
* attribute on the `md-menu-content` element.
* See the [Material Design Spec](http://www.google.com/design/spec/components/menus.html#menus-specs)
* for more information.
*
*
* ## Aligning Menus
*
* When a menu opens, it is important that the content aligns with the trigger element.
* Failure to align menus can result in jarring experiences for users as content
* suddenly shifts. To help with this, `md-menu` provides serveral APIs to help
* with alignment.
*
* ### Target Mode
*
* By default, `md-menu` will attempt to align the `md-menu-content` by aligning
* designated child elements in both the trigger and the menu content.
*
* To specify the alignment element in the `trigger` you can use the `md-menu-origin`
* attribute on a child element. If no `md-menu-origin` is specified, the `md-menu`
* will be used as the origin element.
*
* Similarly, the `md-menu-content` may specify a `md-menu-align-target` for a
* `md-menu-item` to specify the node that it should try and align with.
*
* In this example code, we specify an icon to be our origin element, and an
* icon in our menu content to be our alignment target. This ensures that both
* icons are aligned when the menu opens.
*
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button" aria-label="Open some menu">
* <md-icon md-menu-origin md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* Sometimes we want to specify alignment on the right side of an element, for example
* if we have a menu on the right side a toolbar, we want to right align our menu content.
*
* We can specify the origin by using the `md-position-mode` attribute on both
* the `x` and `y` axis. Right now only the `x-axis` has more than one option.
* You may specify the default mode of `target target` or
* `target-right target` to specify a right-oriented alignment target. See the
* position section of the demos for more examples.
*
* ### Menu Offsets
*
* It is sometimes unavoidable to need to have a deeper level of control for
* the positioning of a menu to ensure perfect alignment. `md-menu` provides
* the `md-offset` attribute to allow pixel level specificty of adjusting the
* exact positioning.
*
* This offset is provided in the format of `x y` or `n` where `n` will be used
* in both the `x` and `y` axis.
*
* For example, to move a menu by `2px` from the top, we can use:
* <hljs lang="html">
* <md-menu md-offset="2 0">
* <!-- menu-content -->
* </md-menu>
* </hljs>
*
* ### Auto Focus
* By default, when a menu opens, `md-menu` focuses the first button in the menu content.
*
* But sometimes you would like to focus another specific menu item instead of the first.<br/>
* This can be done by applying the `md-autofocus` directive on the given element.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button md-autofocus ng-click="doSomething()">
* Auto Focus
* </md-button>
* </md-menu-item>
* </hljs>
*
*
* ### Preventing close
*
* Sometimes you would like to be able to click on a menu item without having the menu
* close. To do this, ngMaterial exposes the `md-prevent-menu-close` attribute which
* can be added to a button inside a menu to stop the menu from automatically closing.
* You can then close the menu programatically by injecting `$mdMenu` and calling
* `$mdMenu.hide()`.
*
* <hljs lang="html">
* <md-menu-item>
* <md-button ng-click="doSomething()" aria-label="Do something" md-prevent-menu-close="md-prevent-menu-close">
* <md-icon md-menu-align-target md-svg-icon="call:phone"></md-icon>
* Do Something
* </md-button>
* </md-menu-item>
* </hljs>
*
* @usage
* <hljs lang="html">
* <md-menu>
* <md-button ng-click="$mdOpenMenu($event)" class="md-icon-button">
* <md-icon md-svg-icon="call:phone"></md-icon>
* </md-button>
* <md-menu-content>
* <md-menu-item><md-button ng-click="doSomething()">Do Something</md-button></md-menu-item>
* </md-menu-content>
* </md-menu>
* </hljs>
*
* @param {string} md-position-mode The position mode in the form of
* `x`, `y`. Default value is `target`,`target`. Right now the `x` axis
* also suppports `target-right`.
* @param {string} md-offset An offset to apply to the dropdown after positioning
* `x`, `y`. Default value is `0`,`0`.
*
*/
angular
.module('material.components.menu')
.directive('mdMenu', MenuDirective);
/**
* @ngInject
*/
function MenuDirective($mdUtil) {
var INVALID_PREFIX = 'Invalid HTML for md-menu: ';
return {
restrict: 'E',
require: ['mdMenu', '?^mdMenuBar'],
controller: 'mdMenuCtrl', // empty function to be built by link
scope: true,
compile: compile
};
function | (templateElement) {
templateElement.addClass('md-menu');
var triggerElement = templateElement.children()[0];
var prefixer = $mdUtil.prefixer();
if (!prefixer.hasAttribute(triggerElement, 'ng-click')) {
triggerElement = triggerElement
.querySelector(prefixer.buildSelector(['ng-click', 'ng-mouseenter'])) || triggerElement;
}
if (triggerElement && (
triggerElement.nodeName == 'MD-BUTTON' ||
triggerElement.nodeName == 'BUTTON'
) && !triggerElement.hasAttribute('type')) {
triggerElement.setAttribute('type', 'button');
}
if (templateElement.children().length != 2) {
throw Error(INVALID_PREFIX + 'Expected two children elements.');
}
// Default element for ARIA attributes has the ngClick or ngMouseenter expression
triggerElement && triggerElement.setAttribute('aria-haspopup', 'true');
var nestedMenus = templateElement[0].querySelectorAll('md-menu');
var nestingDepth = parseInt(templateElement[0].getAttribute('md-nest-level'), 10) || 0;
if (nestedMenus) {
angular.forEach($mdUtil.nodesToArray(nestedMenus), function(menuEl) {
if (!menuEl.hasAttribute('md-position-mode')) {
menuEl.setAttribute('md-position-mode', 'cascade');
}
menuEl.classList.add('_md-nested-menu');
menuEl.setAttribute('md-nest-level', nestingDepth + 1);
});
}
return link;
}
function link(scope, element, attr, ctrls) {
var mdMenuCtrl = ctrls[0];
var isInMenuBar = ctrls[1] != undefined;
// Move everything into a md-menu-container and pass it to the controller
var menuContainer = angular.element( '<div class="_md md-open-menu-container md-whiteframe-z2"></div>');
var menuContents = element.children()[1];
element.addClass('_md'); // private md component indicator for styling
if (!menuContents.hasAttribute('role')) {
menuContents.setAttribute('role', 'menu');
}
menuContainer.append(menuContents);
element.on('$destroy', function() {
menuContainer.remove();
});
element.append(menuContainer);
menuContainer[0].style.display = 'none';
mdMenuCtrl.init(menuContainer, { isInMenuBar: isInMenuBar });
}
}
| compile | identifier_name |
applicable_declarations.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Applicable declarations management.
use properties::PropertyDeclarationBlock;
use rule_tree::{CascadeLevel, StyleSource};
use servo_arc::Arc;
use shared_lock::Locked;
use smallvec::SmallVec;
use std::fmt::{Debug, self};
use std::mem;
/// List of applicable declarations. This is a transient structure that shuttles
/// declarations between selector matching and inserting into the rule tree, and
/// therefore we want to avoid heap-allocation where possible.
///
/// In measurements on wikipedia, we pretty much never have more than 8 applicable
/// declarations, so we could consider making this 8 entries instead of 16.
/// However, it may depend a lot on workload, and stack space is cheap.
pub type ApplicableDeclarationList = SmallVec<[ApplicableDeclarationBlock; 16]>;
/// Blink uses 18 bits to store source order, and does not check overflow [1].
/// That's a limit that could be reached in realistic webpages, so we use
/// 24 bits and enforce defined behavior in the overflow case.
///
/// Note that the value of 24 is also hard-coded into the level() accessor,
/// which does a byte-aligned load of the 4th byte. If you change this value
/// you'll need to change that as well.
///
/// [1] https://cs.chromium.org/chromium/src/third_party/WebKit/Source/core/css/
/// RuleSet.h?l=128&rcl=90140ab80b84d0f889abc253410f44ed54ae04f3
const SOURCE_ORDER_BITS: usize = 24;
const SOURCE_ORDER_MASK: u32 = (1 << SOURCE_ORDER_BITS) - 1;
const SOURCE_ORDER_MAX: u32 = SOURCE_ORDER_MASK;
/// Stores the source order of a block and the cascade level it belongs to.
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Copy, Clone, Eq, PartialEq)]
struct SourceOrderAndCascadeLevel(u32);
impl SourceOrderAndCascadeLevel {
fn new(source_order: u32, cascade_level: CascadeLevel) -> SourceOrderAndCascadeLevel {
let mut bits = ::std::cmp::min(source_order, SOURCE_ORDER_MAX);
bits |= (cascade_level as u8 as u32) << SOURCE_ORDER_BITS;
SourceOrderAndCascadeLevel(bits)
}
fn order(&self) -> u32 {
self.0 & SOURCE_ORDER_MASK
}
fn level(&self) -> CascadeLevel {
unsafe {
// Transmute rather than shifting so that we're sure the compiler
// emits a simple byte-aligned load.
let as_bytes: [u8; 4] = mem::transmute(self.0);
CascadeLevel::from_byte(as_bytes[3])
}
}
}
impl Debug for SourceOrderAndCascadeLevel {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SourceOrderAndCascadeLevel")
.field("order", &self.order())
.field("level", &self.level())
.finish()
}
}
/// A property declaration together with its precedence among rules of equal
/// specificity so that we can sort them.
///
/// This represents the declarations in a given declaration block for a given
/// importance.
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Debug, Clone, PartialEq)]
pub struct ApplicableDeclarationBlock {
/// The style source, either a style rule, or a property declaration block.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub source: StyleSource,
/// The source order of the block, and the cascade level it belongs to.
order_and_level: SourceOrderAndCascadeLevel,
/// The specificity of the selector this block is represented by.
pub specificity: u32,
}
impl ApplicableDeclarationBlock {
/// Constructs an applicable declaration block from a given property
/// declaration block and importance.
#[inline]
pub fn from_declarations(declarations: Arc<Locked<PropertyDeclarationBlock>>,
level: CascadeLevel)
-> Self {
ApplicableDeclarationBlock {
source: StyleSource::Declarations(declarations),
order_and_level: SourceOrderAndCascadeLevel::new(0, level),
specificity: 0,
}
}
/// Constructs an applicable declaration block from the given components
#[inline]
pub fn new(source: StyleSource,
order: u32,
level: CascadeLevel,
specificity: u32) -> Self {
ApplicableDeclarationBlock {
source: source,
order_and_level: SourceOrderAndCascadeLevel::new(order, level),
specificity: specificity,
}
}
/// Returns the source order of the block.
#[inline]
pub fn | (&self) -> u32 {
self.order_and_level.order()
}
/// Returns the cascade level of the block.
#[inline]
pub fn level(&self) -> CascadeLevel {
self.order_and_level.level()
}
/// Convenience method to consume self and return the source alongside the
/// level.
#[inline]
pub fn order_and_level(self) -> (StyleSource, CascadeLevel) {
let level = self.level();
(self.source, level)
}
}
| source_order | identifier_name |
applicable_declarations.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Applicable declarations management.
use properties::PropertyDeclarationBlock;
use rule_tree::{CascadeLevel, StyleSource};
use servo_arc::Arc;
use shared_lock::Locked;
use smallvec::SmallVec;
use std::fmt::{Debug, self};
use std::mem;
/// List of applicable declarations. This is a transient structure that shuttles
/// declarations between selector matching and inserting into the rule tree, and
/// therefore we want to avoid heap-allocation where possible.
///
/// In measurements on wikipedia, we pretty much never have more than 8 applicable
/// declarations, so we could consider making this 8 entries instead of 16.
/// However, it may depend a lot on workload, and stack space is cheap.
pub type ApplicableDeclarationList = SmallVec<[ApplicableDeclarationBlock; 16]>;
/// Blink uses 18 bits to store source order, and does not check overflow [1].
/// That's a limit that could be reached in realistic webpages, so we use
/// 24 bits and enforce defined behavior in the overflow case.
///
/// Note that the value of 24 is also hard-coded into the level() accessor,
/// which does a byte-aligned load of the 4th byte. If you change this value
/// you'll need to change that as well.
///
/// [1] https://cs.chromium.org/chromium/src/third_party/WebKit/Source/core/css/
/// RuleSet.h?l=128&rcl=90140ab80b84d0f889abc253410f44ed54ae04f3
const SOURCE_ORDER_BITS: usize = 24;
const SOURCE_ORDER_MASK: u32 = (1 << SOURCE_ORDER_BITS) - 1;
const SOURCE_ORDER_MAX: u32 = SOURCE_ORDER_MASK;
/// Stores the source order of a block and the cascade level it belongs to.
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Copy, Clone, Eq, PartialEq)]
struct SourceOrderAndCascadeLevel(u32);
impl SourceOrderAndCascadeLevel {
fn new(source_order: u32, cascade_level: CascadeLevel) -> SourceOrderAndCascadeLevel {
let mut bits = ::std::cmp::min(source_order, SOURCE_ORDER_MAX);
bits |= (cascade_level as u8 as u32) << SOURCE_ORDER_BITS;
SourceOrderAndCascadeLevel(bits)
}
fn order(&self) -> u32 {
self.0 & SOURCE_ORDER_MASK
}
fn level(&self) -> CascadeLevel {
unsafe {
// Transmute rather than shifting so that we're sure the compiler
// emits a simple byte-aligned load.
let as_bytes: [u8; 4] = mem::transmute(self.0);
CascadeLevel::from_byte(as_bytes[3])
}
}
}
impl Debug for SourceOrderAndCascadeLevel {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SourceOrderAndCascadeLevel")
.field("order", &self.order())
.field("level", &self.level())
.finish()
}
}
/// A property declaration together with its precedence among rules of equal
/// specificity so that we can sort them.
///
/// This represents the declarations in a given declaration block for a given
/// importance.
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Debug, Clone, PartialEq)]
pub struct ApplicableDeclarationBlock {
/// The style source, either a style rule, or a property declaration block.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub source: StyleSource,
/// The source order of the block, and the cascade level it belongs to.
order_and_level: SourceOrderAndCascadeLevel,
/// The specificity of the selector this block is represented by.
pub specificity: u32,
}
impl ApplicableDeclarationBlock {
/// Constructs an applicable declaration block from a given property
/// declaration block and importance.
#[inline]
pub fn from_declarations(declarations: Arc<Locked<PropertyDeclarationBlock>>,
level: CascadeLevel)
-> Self {
ApplicableDeclarationBlock {
source: StyleSource::Declarations(declarations),
order_and_level: SourceOrderAndCascadeLevel::new(0, level),
specificity: 0,
}
}
/// Constructs an applicable declaration block from the given components
#[inline]
pub fn new(source: StyleSource,
order: u32,
level: CascadeLevel,
specificity: u32) -> Self {
ApplicableDeclarationBlock {
source: source,
order_and_level: SourceOrderAndCascadeLevel::new(order, level),
specificity: specificity,
}
}
/// Returns the source order of the block.
#[inline]
pub fn source_order(&self) -> u32 {
self.order_and_level.order()
}
/// Returns the cascade level of the block.
#[inline]
pub fn level(&self) -> CascadeLevel {
self.order_and_level.level()
}
/// Convenience method to consume self and return the source alongside the
/// level.
#[inline]
pub fn order_and_level(self) -> (StyleSource, CascadeLevel) |
}
| {
let level = self.level();
(self.source, level)
} | identifier_body |
applicable_declarations.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Applicable declarations management.
use properties::PropertyDeclarationBlock;
use rule_tree::{CascadeLevel, StyleSource};
use servo_arc::Arc;
use shared_lock::Locked;
use smallvec::SmallVec;
use std::fmt::{Debug, self};
use std::mem;
/// List of applicable declarations. This is a transient structure that shuttles
/// declarations between selector matching and inserting into the rule tree, and
/// therefore we want to avoid heap-allocation where possible.
///
/// In measurements on wikipedia, we pretty much never have more than 8 applicable
/// declarations, so we could consider making this 8 entries instead of 16.
/// However, it may depend a lot on workload, and stack space is cheap.
pub type ApplicableDeclarationList = SmallVec<[ApplicableDeclarationBlock; 16]>;
/// Blink uses 18 bits to store source order, and does not check overflow [1].
/// That's a limit that could be reached in realistic webpages, so we use
/// 24 bits and enforce defined behavior in the overflow case.
///
/// Note that the value of 24 is also hard-coded into the level() accessor,
/// which does a byte-aligned load of the 4th byte. If you change this value
/// you'll need to change that as well.
///
/// [1] https://cs.chromium.org/chromium/src/third_party/WebKit/Source/core/css/
/// RuleSet.h?l=128&rcl=90140ab80b84d0f889abc253410f44ed54ae04f3
const SOURCE_ORDER_BITS: usize = 24;
const SOURCE_ORDER_MASK: u32 = (1 << SOURCE_ORDER_BITS) - 1;
const SOURCE_ORDER_MAX: u32 = SOURCE_ORDER_MASK;
/// Stores the source order of a block and the cascade level it belongs to.
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Copy, Clone, Eq, PartialEq)]
struct SourceOrderAndCascadeLevel(u32);
impl SourceOrderAndCascadeLevel {
fn new(source_order: u32, cascade_level: CascadeLevel) -> SourceOrderAndCascadeLevel {
let mut bits = ::std::cmp::min(source_order, SOURCE_ORDER_MAX);
bits |= (cascade_level as u8 as u32) << SOURCE_ORDER_BITS;
SourceOrderAndCascadeLevel(bits)
}
fn order(&self) -> u32 {
self.0 & SOURCE_ORDER_MASK
}
fn level(&self) -> CascadeLevel {
unsafe {
// Transmute rather than shifting so that we're sure the compiler
// emits a simple byte-aligned load.
let as_bytes: [u8; 4] = mem::transmute(self.0);
CascadeLevel::from_byte(as_bytes[3])
}
}
}
impl Debug for SourceOrderAndCascadeLevel {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SourceOrderAndCascadeLevel")
.field("order", &self.order())
.field("level", &self.level())
.finish()
}
}
/// A property declaration together with its precedence among rules of equal
/// specificity so that we can sort them.
///
/// This represents the declarations in a given declaration block for a given
/// importance.
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Debug, Clone, PartialEq)]
pub struct ApplicableDeclarationBlock {
/// The style source, either a style rule, or a property declaration block.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub source: StyleSource,
/// The source order of the block, and the cascade level it belongs to.
order_and_level: SourceOrderAndCascadeLevel,
/// The specificity of the selector this block is represented by.
pub specificity: u32,
}
impl ApplicableDeclarationBlock {
/// Constructs an applicable declaration block from a given property
/// declaration block and importance.
#[inline]
pub fn from_declarations(declarations: Arc<Locked<PropertyDeclarationBlock>>,
level: CascadeLevel)
-> Self {
ApplicableDeclarationBlock {
source: StyleSource::Declarations(declarations),
order_and_level: SourceOrderAndCascadeLevel::new(0, level),
specificity: 0,
}
}
/// Constructs an applicable declaration block from the given components
#[inline]
pub fn new(source: StyleSource,
order: u32,
level: CascadeLevel,
specificity: u32) -> Self {
ApplicableDeclarationBlock {
source: source,
order_and_level: SourceOrderAndCascadeLevel::new(order, level),
specificity: specificity,
}
}
/// Returns the source order of the block.
#[inline]
pub fn source_order(&self) -> u32 {
self.order_and_level.order()
}
| pub fn level(&self) -> CascadeLevel {
self.order_and_level.level()
}
/// Convenience method to consume self and return the source alongside the
/// level.
#[inline]
pub fn order_and_level(self) -> (StyleSource, CascadeLevel) {
let level = self.level();
(self.source, level)
}
} | /// Returns the cascade level of the block.
#[inline] | random_line_split |
category-list.component.ts | import {Component, OnInit} from '@angular/core';
import {ActivatedRoute} from '@angular/router';
import {HttpClient} from '@angular/common/http';
import {CategorySummary} from '../category-summary';
import {DiffsUtil} from '../../util/diffs-util';
import {CrudItemState} from '../../widgets/crud-list/crut-item-state';
import {CrudHelper, CrudResource} from '../../util/crud-helper';
@Component({
selector: 'ws-category-list',
template: `
<h1>Kategorie</h1>
<ng-container *ngIf="categories">
<ws-panel>
<h2>Dodaj kategorię</h2>
<div>
<ws-crud-item-component [state]="State.EDIT" [cancelable]="false" (itemSave)="resource.added($event)">
<ng-template let-category #itemEdit>
<ws-category-edit [category]="category" [categories]="categories"></ws-category-edit>
</ng-template>
</ws-crud-item-component>
</div>
</ws-panel>
<ws-panel>
<h2>Lista kategorii</h2>
<ws-crud-list-component [data]="categories" (itemSave)="resource.edited($event)">
<ng-template let-category #itemSummary>
{{category.name}}
<span class="category-path" *ngIf="category.path.length > 0"> ({{category.path | joinArray:' > '}})</span>
</ng-template>
<ng-template let-category #itemEdit>
<ws-category-edit [category]="category" [categories]="categories"></ws-category-edit>
</ng-template>
</ws-crud-list-component>
</ws-panel>
</ng-container>
`,
styleUrls: [
'category-list.component.scss'
],
providers: [
CrudHelper
]
})
export class CategoryListComponent implements OnInit {
categories: CategorySummary[];
State = CrudItemState;
resource: CrudResource<CategorySummary>;
private type: string;
constructor(private httpClient: HttpClient, private route: ActivatedRoute, crudHelper: CrudHelper<CategorySummary>) {
this.resource = crudHelper.asResource({
api: '/api/v1/category',
mapper: crudItemSave => ({
type: this.type,
...DiffsUtil.diff(crudItemSave.changed, crudItemSave.item, {
name: 'name',
parentUuid: 'parentUuid'
})
}),
onSuccess: () => this.fetchCategories(this.type)
});
}
ngOnInit(): void { |
private fetchCategories(type) {
this.httpClient
.get<CategorySummary[]>(`/api/v1/category/${type}`)
.subscribe(categories => this.categories = categories);
}
}
|
this.route
.paramMap
.map(map => map.get('type'))
.subscribe(data => this.type = `_category_${data}`);
this.route
.paramMap
.map(map => map.get('type'))
.map(type => `_category_${type}`)
.subscribe(this.fetchCategories.bind(this));
}
| identifier_body |
category-list.component.ts | import {Component, OnInit} from '@angular/core';
import {ActivatedRoute} from '@angular/router';
import {HttpClient} from '@angular/common/http';
import {CategorySummary} from '../category-summary';
import {DiffsUtil} from '../../util/diffs-util';
import {CrudItemState} from '../../widgets/crud-list/crut-item-state';
import {CrudHelper, CrudResource} from '../../util/crud-helper';
@Component({
selector: 'ws-category-list',
template: `
<h1>Kategorie</h1>
<ng-container *ngIf="categories">
<ws-panel>
<h2>Dodaj kategorię</h2>
<div>
<ws-crud-item-component [state]="State.EDIT" [cancelable]="false" (itemSave)="resource.added($event)">
<ng-template let-category #itemEdit>
<ws-category-edit [category]="category" [categories]="categories"></ws-category-edit>
</ng-template>
</ws-crud-item-component>
</div>
</ws-panel>
<ws-panel>
<h2>Lista kategorii</h2>
<ws-crud-list-component [data]="categories" (itemSave)="resource.edited($event)">
<ng-template let-category #itemSummary>
{{category.name}}
<span class="category-path" *ngIf="category.path.length > 0"> ({{category.path | joinArray:' > '}})</span>
</ng-template>
<ng-template let-category #itemEdit>
<ws-category-edit [category]="category" [categories]="categories"></ws-category-edit>
</ng-template>
</ws-crud-list-component>
</ws-panel>
</ng-container>
`,
styleUrls: [
'category-list.component.scss'
],
providers: [
CrudHelper
]
})
export class CategoryListComponent implements OnInit {
categories: CategorySummary[];
State = CrudItemState;
resource: CrudResource<CategorySummary>;
private type: string;
constructor(private httpClient: HttpClient, private route: ActivatedRoute, crudHelper: CrudHelper<CategorySummary>) {
this.resource = crudHelper.asResource({
api: '/api/v1/category',
mapper: crudItemSave => ({
type: this.type,
...DiffsUtil.diff(crudItemSave.changed, crudItemSave.item, {
name: 'name',
parentUuid: 'parentUuid'
})
}),
onSuccess: () => this.fetchCategories(this.type)
});
}
ngOnInit(): void {
this.route
.paramMap
.map(map => map.get('type'))
.subscribe(data => this.type = `_category_${data}`);
this.route
.paramMap
.map(map => map.get('type'))
.map(type => `_category_${type}`)
.subscribe(this.fetchCategories.bind(this));
}
private fetchCategories(type) {
this.httpClient | } | .get<CategorySummary[]>(`/api/v1/category/${type}`)
.subscribe(categories => this.categories = categories);
}
| random_line_split |
category-list.component.ts | import {Component, OnInit} from '@angular/core';
import {ActivatedRoute} from '@angular/router';
import {HttpClient} from '@angular/common/http';
import {CategorySummary} from '../category-summary';
import {DiffsUtil} from '../../util/diffs-util';
import {CrudItemState} from '../../widgets/crud-list/crut-item-state';
import {CrudHelper, CrudResource} from '../../util/crud-helper';
@Component({
selector: 'ws-category-list',
template: `
<h1>Kategorie</h1>
<ng-container *ngIf="categories">
<ws-panel>
<h2>Dodaj kategorię</h2>
<div>
<ws-crud-item-component [state]="State.EDIT" [cancelable]="false" (itemSave)="resource.added($event)">
<ng-template let-category #itemEdit>
<ws-category-edit [category]="category" [categories]="categories"></ws-category-edit>
</ng-template>
</ws-crud-item-component>
</div>
</ws-panel>
<ws-panel>
<h2>Lista kategorii</h2>
<ws-crud-list-component [data]="categories" (itemSave)="resource.edited($event)">
<ng-template let-category #itemSummary>
{{category.name}}
<span class="category-path" *ngIf="category.path.length > 0"> ({{category.path | joinArray:' > '}})</span>
</ng-template>
<ng-template let-category #itemEdit>
<ws-category-edit [category]="category" [categories]="categories"></ws-category-edit>
</ng-template>
</ws-crud-list-component>
</ws-panel>
</ng-container>
`,
styleUrls: [
'category-list.component.scss'
],
providers: [
CrudHelper
]
})
export class CategoryListComponent implements OnInit {
categories: CategorySummary[];
State = CrudItemState;
resource: CrudResource<CategorySummary>;
private type: string;
constructor(private httpClient: HttpClient, private route: ActivatedRoute, crudHelper: CrudHelper<CategorySummary>) {
this.resource = crudHelper.asResource({
api: '/api/v1/category',
mapper: crudItemSave => ({
type: this.type,
...DiffsUtil.diff(crudItemSave.changed, crudItemSave.item, {
name: 'name',
parentUuid: 'parentUuid'
})
}),
onSuccess: () => this.fetchCategories(this.type)
});
}
n | ): void {
this.route
.paramMap
.map(map => map.get('type'))
.subscribe(data => this.type = `_category_${data}`);
this.route
.paramMap
.map(map => map.get('type'))
.map(type => `_category_${type}`)
.subscribe(this.fetchCategories.bind(this));
}
private fetchCategories(type) {
this.httpClient
.get<CategorySummary[]>(`/api/v1/category/${type}`)
.subscribe(categories => this.categories = categories);
}
}
| gOnInit( | identifier_name |
passport-local-tests.ts | /// <reference path="./passport-local.d.ts" />
/**
* Created by Maxime LUCE <https://github.com/SomaticIT>.
*/
import express = require("express");
import passport = require('passport');
import local = require('passport-local');
//#region Test Models
interface IUser {
username: string;
}
class User implements IUser {
public username: string;
public password: string;
static findOne(user: IUser, callback: (err: Error, user: User) => void): void {
callback(null, new User());
}
| (password: string): boolean {
return true;
}
}
//#endregion
// Sample from https://github.com/jaredhanson/passport-local#configure-strategy
passport.use(new local.Strategy((username: any, password: any, done: any) => {
User.findOne({ username: username }, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
if (!user.verifyPassword(password)) {
return done(null, false);
}
return done(null, user);
});
}));
passport.use(new local.Strategy({
passReqToCallback: true
}, function (req, username, password, done) {
User.findOne({ username: username }, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
if (!user.verifyPassword(password)) {
return done(null, false);
}
return done(null, user);
});
}));
// Sample from https://github.com/jaredhanson/passport-local#authenticate-requests
var app = express();
app.post('/login',
passport.authenticate('local', { failureRedirect: '/login' }),
function (req, res) {
res.redirect('/');
});
| verifyPassword | identifier_name |
passport-local-tests.ts | /// <reference path="./passport-local.d.ts" />
/**
* Created by Maxime LUCE <https://github.com/SomaticIT>.
*/
import express = require("express");
import passport = require('passport');
import local = require('passport-local');
//#region Test Models
interface IUser {
username: string;
}
class User implements IUser {
public username: string;
public password: string;
static findOne(user: IUser, callback: (err: Error, user: User) => void): void {
callback(null, new User());
}
verifyPassword(password: string): boolean {
return true;
}
}
//#endregion
// Sample from https://github.com/jaredhanson/passport-local#configure-strategy
passport.use(new local.Strategy((username: any, password: any, done: any) => {
User.findOne({ username: username }, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
if (!user.verifyPassword(password)) {
return done(null, false);
}
return done(null, user);
});
}));
passport.use(new local.Strategy({
passReqToCallback: true
}, function (req, username, password, done) {
User.findOne({ username: username }, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
| return done(null, false);
}
return done(null, user);
});
}));
// Sample from https://github.com/jaredhanson/passport-local#authenticate-requests
var app = express();
app.post('/login',
passport.authenticate('local', { failureRedirect: '/login' }),
function (req, res) {
res.redirect('/');
}); | if (!user.verifyPassword(password)) { | random_line_split |
passport-local-tests.ts | /// <reference path="./passport-local.d.ts" />
/**
* Created by Maxime LUCE <https://github.com/SomaticIT>.
*/
import express = require("express");
import passport = require('passport');
import local = require('passport-local');
//#region Test Models
interface IUser {
username: string;
}
class User implements IUser {
public username: string;
public password: string;
static findOne(user: IUser, callback: (err: Error, user: User) => void): void {
callback(null, new User());
}
verifyPassword(password: string): boolean {
return true;
}
}
//#endregion
// Sample from https://github.com/jaredhanson/passport-local#configure-strategy
passport.use(new local.Strategy((username: any, password: any, done: any) => {
User.findOne({ username: username }, function (err, user) {
if (err) |
if (!user) {
return done(null, false);
}
if (!user.verifyPassword(password)) {
return done(null, false);
}
return done(null, user);
});
}));
passport.use(new local.Strategy({
passReqToCallback: true
}, function (req, username, password, done) {
User.findOne({ username: username }, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
if (!user.verifyPassword(password)) {
return done(null, false);
}
return done(null, user);
});
}));
// Sample from https://github.com/jaredhanson/passport-local#authenticate-requests
var app = express();
app.post('/login',
passport.authenticate('local', { failureRedirect: '/login' }),
function (req, res) {
res.redirect('/');
});
| {
return done(err);
} | conditional_block |
passport-local-tests.ts | /// <reference path="./passport-local.d.ts" />
/**
* Created by Maxime LUCE <https://github.com/SomaticIT>.
*/
import express = require("express");
import passport = require('passport');
import local = require('passport-local');
//#region Test Models
interface IUser {
username: string;
}
class User implements IUser {
public username: string;
public password: string;
static findOne(user: IUser, callback: (err: Error, user: User) => void): void |
verifyPassword(password: string): boolean {
return true;
}
}
//#endregion
// Sample from https://github.com/jaredhanson/passport-local#configure-strategy
passport.use(new local.Strategy((username: any, password: any, done: any) => {
User.findOne({ username: username }, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
if (!user.verifyPassword(password)) {
return done(null, false);
}
return done(null, user);
});
}));
passport.use(new local.Strategy({
passReqToCallback: true
}, function (req, username, password, done) {
User.findOne({ username: username }, function (err, user) {
if (err) {
return done(err);
}
if (!user) {
return done(null, false);
}
if (!user.verifyPassword(password)) {
return done(null, false);
}
return done(null, user);
});
}));
// Sample from https://github.com/jaredhanson/passport-local#authenticate-requests
var app = express();
app.post('/login',
passport.authenticate('local', { failureRedirect: '/login' }),
function (req, res) {
res.redirect('/');
});
| {
callback(null, new User());
} | identifier_body |
org_app.py | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the views for GSoC Organization Application.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>',
] |
from django.conf.urls.defaults import url
from soc.logic.models.org_app_survey import logic as org_app_logic
from soc.views import forms
from soc.views.models.org_app_survey import OrgAppSurveyForm
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.helper import url_patterns
class OrgAppForm(OrgAppSurveyForm, forms.ModelForm):
"""Form for Organization Applications inherited from Surveys.
"""
#TODO: Rewrite this class while refactoring surveys
def __init__(self, *args, **kwargs):
"""Act as a bridge between the new Forms APIs and the existing Survey
Form classes.
"""
kwargs.update({
'survey': kwargs.get('instance', None),
'survey_logic': org_app_logic,
})
super(OrgAppForm, self).__init__(*args, **kwargs)
class OrgApp(RequestHandler):
"""View methods for Organization Application Applications.
"""
def templatePath(self):
return 'v2/modules/gsoc/org_app/apply.html'
def djangoURLPatterns(self):
"""Returns the list of tuples for containing URL to view method mapping.
"""
return [
url(r'^gsoc/org_app/apply/%s$' % url_patterns.SURVEY, self,
name='gsoc_org_app_apply')
]
def checkAccess(self):
"""Access checks for GSoC Organization Application.
"""
pass
def context(self):
"""Handler to for GSoC Organization Application HTTP get request.
"""
org_app_keyfields = {
'prefix': self.kwargs.get('prefix'),
'scope_path': '%s/%s' % (self.kwargs.get('sponsor'),
self.kwargs.get('program')),
'link_id': self.kwargs.get('survey'),
}
org_app_entity = org_app_logic.getFromKeyFieldsOr404(org_app_keyfields)
if self.data.request.method == 'POST':
org_app_form = OrgAppForm(self.data.POST, instance=org_app_entity)
else:
org_app_form = OrgAppForm(instance=org_app_entity)
return {
'page_name': 'Organization Application',
'org_app_form': org_app_form,
} | random_line_split |
|
org_app.py | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the views for GSoC Organization Application.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>',
]
from django.conf.urls.defaults import url
from soc.logic.models.org_app_survey import logic as org_app_logic
from soc.views import forms
from soc.views.models.org_app_survey import OrgAppSurveyForm
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.helper import url_patterns
class OrgAppForm(OrgAppSurveyForm, forms.ModelForm):
"""Form for Organization Applications inherited from Surveys.
"""
#TODO: Rewrite this class while refactoring surveys
def __init__(self, *args, **kwargs):
"""Act as a bridge between the new Forms APIs and the existing Survey
Form classes.
"""
kwargs.update({
'survey': kwargs.get('instance', None),
'survey_logic': org_app_logic,
})
super(OrgAppForm, self).__init__(*args, **kwargs)
class OrgApp(RequestHandler):
"""View methods for Organization Application Applications.
"""
def templatePath(self):
return 'v2/modules/gsoc/org_app/apply.html'
def | (self):
"""Returns the list of tuples for containing URL to view method mapping.
"""
return [
url(r'^gsoc/org_app/apply/%s$' % url_patterns.SURVEY, self,
name='gsoc_org_app_apply')
]
def checkAccess(self):
"""Access checks for GSoC Organization Application.
"""
pass
def context(self):
"""Handler to for GSoC Organization Application HTTP get request.
"""
org_app_keyfields = {
'prefix': self.kwargs.get('prefix'),
'scope_path': '%s/%s' % (self.kwargs.get('sponsor'),
self.kwargs.get('program')),
'link_id': self.kwargs.get('survey'),
}
org_app_entity = org_app_logic.getFromKeyFieldsOr404(org_app_keyfields)
if self.data.request.method == 'POST':
org_app_form = OrgAppForm(self.data.POST, instance=org_app_entity)
else:
org_app_form = OrgAppForm(instance=org_app_entity)
return {
'page_name': 'Organization Application',
'org_app_form': org_app_form,
}
| djangoURLPatterns | identifier_name |
org_app.py | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the views for GSoC Organization Application.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>',
]
from django.conf.urls.defaults import url
from soc.logic.models.org_app_survey import logic as org_app_logic
from soc.views import forms
from soc.views.models.org_app_survey import OrgAppSurveyForm
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.helper import url_patterns
class OrgAppForm(OrgAppSurveyForm, forms.ModelForm):
|
class OrgApp(RequestHandler):
"""View methods for Organization Application Applications.
"""
def templatePath(self):
return 'v2/modules/gsoc/org_app/apply.html'
def djangoURLPatterns(self):
"""Returns the list of tuples for containing URL to view method mapping.
"""
return [
url(r'^gsoc/org_app/apply/%s$' % url_patterns.SURVEY, self,
name='gsoc_org_app_apply')
]
def checkAccess(self):
"""Access checks for GSoC Organization Application.
"""
pass
def context(self):
"""Handler to for GSoC Organization Application HTTP get request.
"""
org_app_keyfields = {
'prefix': self.kwargs.get('prefix'),
'scope_path': '%s/%s' % (self.kwargs.get('sponsor'),
self.kwargs.get('program')),
'link_id': self.kwargs.get('survey'),
}
org_app_entity = org_app_logic.getFromKeyFieldsOr404(org_app_keyfields)
if self.data.request.method == 'POST':
org_app_form = OrgAppForm(self.data.POST, instance=org_app_entity)
else:
org_app_form = OrgAppForm(instance=org_app_entity)
return {
'page_name': 'Organization Application',
'org_app_form': org_app_form,
}
| """Form for Organization Applications inherited from Surveys.
"""
#TODO: Rewrite this class while refactoring surveys
def __init__(self, *args, **kwargs):
"""Act as a bridge between the new Forms APIs and the existing Survey
Form classes.
"""
kwargs.update({
'survey': kwargs.get('instance', None),
'survey_logic': org_app_logic,
})
super(OrgAppForm, self).__init__(*args, **kwargs) | identifier_body |
org_app.py | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the views for GSoC Organization Application.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>',
]
from django.conf.urls.defaults import url
from soc.logic.models.org_app_survey import logic as org_app_logic
from soc.views import forms
from soc.views.models.org_app_survey import OrgAppSurveyForm
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.helper import url_patterns
class OrgAppForm(OrgAppSurveyForm, forms.ModelForm):
"""Form for Organization Applications inherited from Surveys.
"""
#TODO: Rewrite this class while refactoring surveys
def __init__(self, *args, **kwargs):
"""Act as a bridge between the new Forms APIs and the existing Survey
Form classes.
"""
kwargs.update({
'survey': kwargs.get('instance', None),
'survey_logic': org_app_logic,
})
super(OrgAppForm, self).__init__(*args, **kwargs)
class OrgApp(RequestHandler):
"""View methods for Organization Application Applications.
"""
def templatePath(self):
return 'v2/modules/gsoc/org_app/apply.html'
def djangoURLPatterns(self):
"""Returns the list of tuples for containing URL to view method mapping.
"""
return [
url(r'^gsoc/org_app/apply/%s$' % url_patterns.SURVEY, self,
name='gsoc_org_app_apply')
]
def checkAccess(self):
"""Access checks for GSoC Organization Application.
"""
pass
def context(self):
"""Handler to for GSoC Organization Application HTTP get request.
"""
org_app_keyfields = {
'prefix': self.kwargs.get('prefix'),
'scope_path': '%s/%s' % (self.kwargs.get('sponsor'),
self.kwargs.get('program')),
'link_id': self.kwargs.get('survey'),
}
org_app_entity = org_app_logic.getFromKeyFieldsOr404(org_app_keyfields)
if self.data.request.method == 'POST':
|
else:
org_app_form = OrgAppForm(instance=org_app_entity)
return {
'page_name': 'Organization Application',
'org_app_form': org_app_form,
}
| org_app_form = OrgAppForm(self.data.POST, instance=org_app_entity) | conditional_block |
task_arc_wake.rs | use futures::task::{self, ArcWake, Waker};
use std::panic;
use std::sync::{Arc, Mutex};
struct | {
nr_wake: Mutex<i32>,
}
impl CountingWaker {
fn new() -> Self {
Self { nr_wake: Mutex::new(0) }
}
fn wakes(&self) -> i32 {
*self.nr_wake.lock().unwrap()
}
}
impl ArcWake for CountingWaker {
fn wake_by_ref(arc_self: &Arc<Self>) {
let mut lock = arc_self.nr_wake.lock().unwrap();
*lock += 1;
}
}
#[test]
fn create_from_arc() {
let some_w = Arc::new(CountingWaker::new());
let w1: Waker = task::waker(some_w.clone());
assert_eq!(2, Arc::strong_count(&some_w));
w1.wake_by_ref();
assert_eq!(1, some_w.wakes());
let w2 = w1.clone();
assert_eq!(3, Arc::strong_count(&some_w));
w2.wake_by_ref();
assert_eq!(2, some_w.wakes());
drop(w2);
assert_eq!(2, Arc::strong_count(&some_w));
drop(w1);
assert_eq!(1, Arc::strong_count(&some_w));
}
#[test]
fn ref_wake_same() {
let some_w = Arc::new(CountingWaker::new());
let w1: Waker = task::waker(some_w.clone());
let w2 = task::waker_ref(&some_w);
let w3 = w2.clone();
assert!(w1.will_wake(&w2));
assert!(w2.will_wake(&w3));
}
#[test]
fn proper_refcount_on_wake_panic() {
struct PanicWaker;
impl ArcWake for PanicWaker {
fn wake_by_ref(_arc_self: &Arc<Self>) {
panic!("WAKE UP");
}
}
let some_w = Arc::new(PanicWaker);
let w1: Waker = task::waker(some_w.clone());
assert_eq!(
"WAKE UP",
*panic::catch_unwind(|| w1.wake_by_ref()).unwrap_err().downcast::<&str>().unwrap()
);
assert_eq!(2, Arc::strong_count(&some_w)); // some_w + w1
drop(w1);
assert_eq!(1, Arc::strong_count(&some_w)); // some_w
}
| CountingWaker | identifier_name |
task_arc_wake.rs | use futures::task::{self, ArcWake, Waker};
use std::panic;
use std::sync::{Arc, Mutex};
struct CountingWaker {
nr_wake: Mutex<i32>,
}
impl CountingWaker {
fn new() -> Self {
Self { nr_wake: Mutex::new(0) }
}
fn wakes(&self) -> i32 {
*self.nr_wake.lock().unwrap()
}
}
impl ArcWake for CountingWaker {
fn wake_by_ref(arc_self: &Arc<Self>) {
let mut lock = arc_self.nr_wake.lock().unwrap();
*lock += 1;
}
}
#[test]
fn create_from_arc() |
#[test]
fn ref_wake_same() {
let some_w = Arc::new(CountingWaker::new());
let w1: Waker = task::waker(some_w.clone());
let w2 = task::waker_ref(&some_w);
let w3 = w2.clone();
assert!(w1.will_wake(&w2));
assert!(w2.will_wake(&w3));
}
#[test]
fn proper_refcount_on_wake_panic() {
struct PanicWaker;
impl ArcWake for PanicWaker {
fn wake_by_ref(_arc_self: &Arc<Self>) {
panic!("WAKE UP");
}
}
let some_w = Arc::new(PanicWaker);
let w1: Waker = task::waker(some_w.clone());
assert_eq!(
"WAKE UP",
*panic::catch_unwind(|| w1.wake_by_ref()).unwrap_err().downcast::<&str>().unwrap()
);
assert_eq!(2, Arc::strong_count(&some_w)); // some_w + w1
drop(w1);
assert_eq!(1, Arc::strong_count(&some_w)); // some_w
}
| {
let some_w = Arc::new(CountingWaker::new());
let w1: Waker = task::waker(some_w.clone());
assert_eq!(2, Arc::strong_count(&some_w));
w1.wake_by_ref();
assert_eq!(1, some_w.wakes());
let w2 = w1.clone();
assert_eq!(3, Arc::strong_count(&some_w));
w2.wake_by_ref();
assert_eq!(2, some_w.wakes());
drop(w2);
assert_eq!(2, Arc::strong_count(&some_w));
drop(w1);
assert_eq!(1, Arc::strong_count(&some_w));
} | identifier_body |
task_arc_wake.rs | use futures::task::{self, ArcWake, Waker};
use std::panic;
use std::sync::{Arc, Mutex};
struct CountingWaker {
nr_wake: Mutex<i32>,
}
impl CountingWaker {
fn new() -> Self {
Self { nr_wake: Mutex::new(0) }
}
fn wakes(&self) -> i32 {
*self.nr_wake.lock().unwrap()
}
}
impl ArcWake for CountingWaker {
fn wake_by_ref(arc_self: &Arc<Self>) {
let mut lock = arc_self.nr_wake.lock().unwrap();
*lock += 1;
}
}
#[test]
fn create_from_arc() {
let some_w = Arc::new(CountingWaker::new());
let w1: Waker = task::waker(some_w.clone());
assert_eq!(2, Arc::strong_count(&some_w));
w1.wake_by_ref();
assert_eq!(1, some_w.wakes());
let w2 = w1.clone();
assert_eq!(3, Arc::strong_count(&some_w));
w2.wake_by_ref();
assert_eq!(2, some_w.wakes());
drop(w2);
assert_eq!(2, Arc::strong_count(&some_w));
drop(w1);
assert_eq!(1, Arc::strong_count(&some_w));
}
#[test]
fn ref_wake_same() {
let some_w = Arc::new(CountingWaker::new());
let w1: Waker = task::waker(some_w.clone());
let w2 = task::waker_ref(&some_w);
let w3 = w2.clone(); |
#[test]
fn proper_refcount_on_wake_panic() {
struct PanicWaker;
impl ArcWake for PanicWaker {
fn wake_by_ref(_arc_self: &Arc<Self>) {
panic!("WAKE UP");
}
}
let some_w = Arc::new(PanicWaker);
let w1: Waker = task::waker(some_w.clone());
assert_eq!(
"WAKE UP",
*panic::catch_unwind(|| w1.wake_by_ref()).unwrap_err().downcast::<&str>().unwrap()
);
assert_eq!(2, Arc::strong_count(&some_w)); // some_w + w1
drop(w1);
assert_eq!(1, Arc::strong_count(&some_w)); // some_w
} |
assert!(w1.will_wake(&w2));
assert!(w2.will_wake(&w3));
} | random_line_split |
static.rs | const FILE_GENERIC_READ: DWORD =
STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE;
static boolnames: &'static [&'static str] = &[ |
static mut name: SomeType =
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa;
pub static count: u8 = 10;
pub const test: &Type = &val;
impl Color {
pub const WHITE: u32 = 10;
}
// #1391
pub const XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX: NTSTATUS =
0 as usize;
pub const XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX:
Yyyyyyyyyyyyyyyyyyyyyyyyyyyy = 1; | "bw", "am", "xsb", "xhp", "xenl", "eo", "gn", "hc", "km", "hs", "in", "db", "da", "mir",
"msgr", "os", "eslok", "xt", "hz", "ul", "xon", "nxon", "mc5i", "chts", "nrrmc", "npc",
"ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy", "xvpa", "sam", "cpix", "lpix", "OTbs",
"OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr",
]; | random_line_split |
datasource.ts | import _ from 'lodash';
import { Observable, of } from 'rxjs';
import { map } from 'rxjs/operators';
import { getBackendSrv } from '@grafana/runtime';
import { DataQueryResponse, ScopedVars } from '@grafana/data';
import ResponseParser from './response_parser';
import PostgresQuery from 'app/plugins/datasource/postgres/postgres_query';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
//Types
import { PostgresMetricFindValue, PostgresQueryForInterpolation } from './types';
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
export class PostgresDatasource {
id: any;
name: any;
jsonData: any;
responseParser: ResponseParser;
queryModel: PostgresQuery;
interval: string;
constructor(
instanceSettings: { name: any; id?: any; jsonData?: any },
private readonly templateSrv: TemplateSrv = getTemplateSrv(),
private readonly timeSrv: TimeSrv = getTimeSrv()
) {
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.jsonData = instanceSettings.jsonData;
this.responseParser = new ResponseParser();
this.queryModel = new PostgresQuery({});
this.interval = (instanceSettings.jsonData || {}).timeInterval || '1m';
}
interpolateVariable = (value: string | string[], variable: { multi: any; includeAll: any }) => {
if (typeof value === 'string') {
if (variable.multi || variable.includeAll) {
return this.queryModel.quoteLiteral(value);
} else {
return value;
}
}
if (typeof value === 'number') {
return value;
}
const quotedValues = _.map(value, v => {
return this.queryModel.quoteLiteral(v);
});
return quotedValues.join(',');
};
interpolateVariablesInQueries(
queries: PostgresQueryForInterpolation[],
scopedVars: ScopedVars
): PostgresQueryForInterpolation[] {
let expandedQueries = queries;
if (queries && queries.length > 0) {
expandedQueries = queries.map(query => {
const expandedQuery = {
...query,
datasource: this.name,
rawSql: this.templateSrv.replace(query.rawSql, scopedVars, this.interpolateVariable),
rawQuery: true,
};
return expandedQuery;
});
}
return expandedQueries;
}
query(options: any): Observable<DataQueryResponse> {
const queries = _.filter(options.targets, target => {
return target.hide !== true;
}).map(target => {
const queryModel = new PostgresQuery(target, this.templateSrv, options.scopedVars);
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable),
format: target.format,
};
});
if (queries.length === 0) {
return of({ data: [] });
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map(this.responseParser.processQueryResult));
}
annotationQuery(options: any) {
if (!options.annotation.rawQuery) {
return Promise.reject({
message: 'Query missing in annotation definition',
});
}
const query = {
refId: options.annotation.name,
datasourceId: this.id,
rawSql: this.templateSrv.replace(options.annotation.rawQuery, options.scopedVars, this.interpolateVariable),
format: 'table',
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [query],
},
})
.pipe(map((data: any) => this.responseParser.transformAnnotationResponse(options, data)))
.toPromise();
}
metricFindQuery(
query: string,
optionalOptions: { variable?: any; searchFilter?: string }
): Promise<PostgresMetricFindValue[]> {
let refId = 'tempvar';
if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) {
refId = optionalOptions.variable.name;
}
const rawSql = this.templateSrv.replace(
query,
getSearchFilterScopedVar({ query, wildcardChar: '%', options: optionalOptions }),
this.interpolateVariable
);
const interpolatedQuery = {
refId: refId,
datasourceId: this.id,
rawSql,
format: 'table',
};
const range = this.timeSrv.timeRange();
const data = {
queries: [interpolatedQuery],
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: data,
})
.pipe(map((data: any) => this.responseParser.parseMetricFindQueryResult(refId, data)))
.toPromise();
}
getVersion() {
return this.metricFindQuery("SELECT current_setting('server_version_num')::int/100", {});
}
getTimescaleDBVersion() {
return this.metricFindQuery("SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'", {});
}
testDatasource() {
return this.metricFindQuery('SELECT 1', {})
.then((res: any) => {
return { status: 'success', message: 'Database Connection OK' };
})
.catch((err: any) => {
console.error(err);
if (err.data && err.data.message) {
return { status: 'error', message: err.data.message };
} else {
return { status: 'error', message: err.status };
}
});
}
| (target: any) {
let rawSql = '';
if (target.rawQuery) {
rawSql = target.rawSql;
} else {
const query = new PostgresQuery(target);
rawSql = query.buildQuery();
}
rawSql = rawSql.replace('$__', '');
return this.templateSrv.variableExists(rawSql);
}
}
| targetContainsTemplate | identifier_name |
datasource.ts | import _ from 'lodash';
import { Observable, of } from 'rxjs';
import { map } from 'rxjs/operators';
import { getBackendSrv } from '@grafana/runtime';
import { DataQueryResponse, ScopedVars } from '@grafana/data';
import ResponseParser from './response_parser';
import PostgresQuery from 'app/plugins/datasource/postgres/postgres_query';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
//Types
import { PostgresMetricFindValue, PostgresQueryForInterpolation } from './types';
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
export class PostgresDatasource {
id: any;
name: any;
jsonData: any;
responseParser: ResponseParser;
queryModel: PostgresQuery;
interval: string;
constructor(
instanceSettings: { name: any; id?: any; jsonData?: any },
private readonly templateSrv: TemplateSrv = getTemplateSrv(),
private readonly timeSrv: TimeSrv = getTimeSrv()
) {
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.jsonData = instanceSettings.jsonData;
this.responseParser = new ResponseParser();
this.queryModel = new PostgresQuery({});
this.interval = (instanceSettings.jsonData || {}).timeInterval || '1m';
}
interpolateVariable = (value: string | string[], variable: { multi: any; includeAll: any }) => {
if (typeof value === 'string') {
if (variable.multi || variable.includeAll) {
return this.queryModel.quoteLiteral(value);
} else {
return value;
}
}
if (typeof value === 'number') |
const quotedValues = _.map(value, v => {
return this.queryModel.quoteLiteral(v);
});
return quotedValues.join(',');
};
interpolateVariablesInQueries(
queries: PostgresQueryForInterpolation[],
scopedVars: ScopedVars
): PostgresQueryForInterpolation[] {
let expandedQueries = queries;
if (queries && queries.length > 0) {
expandedQueries = queries.map(query => {
const expandedQuery = {
...query,
datasource: this.name,
rawSql: this.templateSrv.replace(query.rawSql, scopedVars, this.interpolateVariable),
rawQuery: true,
};
return expandedQuery;
});
}
return expandedQueries;
}
query(options: any): Observable<DataQueryResponse> {
const queries = _.filter(options.targets, target => {
return target.hide !== true;
}).map(target => {
const queryModel = new PostgresQuery(target, this.templateSrv, options.scopedVars);
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable),
format: target.format,
};
});
if (queries.length === 0) {
return of({ data: [] });
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map(this.responseParser.processQueryResult));
}
annotationQuery(options: any) {
if (!options.annotation.rawQuery) {
return Promise.reject({
message: 'Query missing in annotation definition',
});
}
const query = {
refId: options.annotation.name,
datasourceId: this.id,
rawSql: this.templateSrv.replace(options.annotation.rawQuery, options.scopedVars, this.interpolateVariable),
format: 'table',
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [query],
},
})
.pipe(map((data: any) => this.responseParser.transformAnnotationResponse(options, data)))
.toPromise();
}
metricFindQuery(
query: string,
optionalOptions: { variable?: any; searchFilter?: string }
): Promise<PostgresMetricFindValue[]> {
let refId = 'tempvar';
if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) {
refId = optionalOptions.variable.name;
}
const rawSql = this.templateSrv.replace(
query,
getSearchFilterScopedVar({ query, wildcardChar: '%', options: optionalOptions }),
this.interpolateVariable
);
const interpolatedQuery = {
refId: refId,
datasourceId: this.id,
rawSql,
format: 'table',
};
const range = this.timeSrv.timeRange();
const data = {
queries: [interpolatedQuery],
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: data,
})
.pipe(map((data: any) => this.responseParser.parseMetricFindQueryResult(refId, data)))
.toPromise();
}
getVersion() {
return this.metricFindQuery("SELECT current_setting('server_version_num')::int/100", {});
}
getTimescaleDBVersion() {
return this.metricFindQuery("SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'", {});
}
testDatasource() {
return this.metricFindQuery('SELECT 1', {})
.then((res: any) => {
return { status: 'success', message: 'Database Connection OK' };
})
.catch((err: any) => {
console.error(err);
if (err.data && err.data.message) {
return { status: 'error', message: err.data.message };
} else {
return { status: 'error', message: err.status };
}
});
}
targetContainsTemplate(target: any) {
let rawSql = '';
if (target.rawQuery) {
rawSql = target.rawSql;
} else {
const query = new PostgresQuery(target);
rawSql = query.buildQuery();
}
rawSql = rawSql.replace('$__', '');
return this.templateSrv.variableExists(rawSql);
}
}
| {
return value;
} | conditional_block |
datasource.ts | import _ from 'lodash';
import { Observable, of } from 'rxjs';
import { map } from 'rxjs/operators';
import { getBackendSrv } from '@grafana/runtime';
import { DataQueryResponse, ScopedVars } from '@grafana/data';
import ResponseParser from './response_parser';
import PostgresQuery from 'app/plugins/datasource/postgres/postgres_query';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
//Types
import { PostgresMetricFindValue, PostgresQueryForInterpolation } from './types';
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
export class PostgresDatasource {
id: any;
name: any;
jsonData: any;
responseParser: ResponseParser;
queryModel: PostgresQuery;
interval: string;
constructor(
instanceSettings: { name: any; id?: any; jsonData?: any },
private readonly templateSrv: TemplateSrv = getTemplateSrv(),
private readonly timeSrv: TimeSrv = getTimeSrv()
) |
interpolateVariable = (value: string | string[], variable: { multi: any; includeAll: any }) => {
if (typeof value === 'string') {
if (variable.multi || variable.includeAll) {
return this.queryModel.quoteLiteral(value);
} else {
return value;
}
}
if (typeof value === 'number') {
return value;
}
const quotedValues = _.map(value, v => {
return this.queryModel.quoteLiteral(v);
});
return quotedValues.join(',');
};
interpolateVariablesInQueries(
queries: PostgresQueryForInterpolation[],
scopedVars: ScopedVars
): PostgresQueryForInterpolation[] {
let expandedQueries = queries;
if (queries && queries.length > 0) {
expandedQueries = queries.map(query => {
const expandedQuery = {
...query,
datasource: this.name,
rawSql: this.templateSrv.replace(query.rawSql, scopedVars, this.interpolateVariable),
rawQuery: true,
};
return expandedQuery;
});
}
return expandedQueries;
}
query(options: any): Observable<DataQueryResponse> {
const queries = _.filter(options.targets, target => {
return target.hide !== true;
}).map(target => {
const queryModel = new PostgresQuery(target, this.templateSrv, options.scopedVars);
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable),
format: target.format,
};
});
if (queries.length === 0) {
return of({ data: [] });
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map(this.responseParser.processQueryResult));
}
annotationQuery(options: any) {
if (!options.annotation.rawQuery) {
return Promise.reject({
message: 'Query missing in annotation definition',
});
}
const query = {
refId: options.annotation.name,
datasourceId: this.id,
rawSql: this.templateSrv.replace(options.annotation.rawQuery, options.scopedVars, this.interpolateVariable),
format: 'table',
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [query],
},
})
.pipe(map((data: any) => this.responseParser.transformAnnotationResponse(options, data)))
.toPromise();
}
metricFindQuery(
query: string,
optionalOptions: { variable?: any; searchFilter?: string }
): Promise<PostgresMetricFindValue[]> {
let refId = 'tempvar';
if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) {
refId = optionalOptions.variable.name;
}
const rawSql = this.templateSrv.replace(
query,
getSearchFilterScopedVar({ query, wildcardChar: '%', options: optionalOptions }),
this.interpolateVariable
);
const interpolatedQuery = {
refId: refId,
datasourceId: this.id,
rawSql,
format: 'table',
};
const range = this.timeSrv.timeRange();
const data = {
queries: [interpolatedQuery],
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: data,
})
.pipe(map((data: any) => this.responseParser.parseMetricFindQueryResult(refId, data)))
.toPromise();
}
getVersion() {
return this.metricFindQuery("SELECT current_setting('server_version_num')::int/100", {});
}
getTimescaleDBVersion() {
return this.metricFindQuery("SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'", {});
}
testDatasource() {
return this.metricFindQuery('SELECT 1', {})
.then((res: any) => {
return { status: 'success', message: 'Database Connection OK' };
})
.catch((err: any) => {
console.error(err);
if (err.data && err.data.message) {
return { status: 'error', message: err.data.message };
} else {
return { status: 'error', message: err.status };
}
});
}
targetContainsTemplate(target: any) {
let rawSql = '';
if (target.rawQuery) {
rawSql = target.rawSql;
} else {
const query = new PostgresQuery(target);
rawSql = query.buildQuery();
}
rawSql = rawSql.replace('$__', '');
return this.templateSrv.variableExists(rawSql);
}
}
| {
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.jsonData = instanceSettings.jsonData;
this.responseParser = new ResponseParser();
this.queryModel = new PostgresQuery({});
this.interval = (instanceSettings.jsonData || {}).timeInterval || '1m';
} | identifier_body |
datasource.ts | import _ from 'lodash';
import { Observable, of } from 'rxjs'; | import { DataQueryResponse, ScopedVars } from '@grafana/data';
import ResponseParser from './response_parser';
import PostgresQuery from 'app/plugins/datasource/postgres/postgres_query';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
//Types
import { PostgresMetricFindValue, PostgresQueryForInterpolation } from './types';
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
export class PostgresDatasource {
id: any;
name: any;
jsonData: any;
responseParser: ResponseParser;
queryModel: PostgresQuery;
interval: string;
constructor(
instanceSettings: { name: any; id?: any; jsonData?: any },
private readonly templateSrv: TemplateSrv = getTemplateSrv(),
private readonly timeSrv: TimeSrv = getTimeSrv()
) {
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.jsonData = instanceSettings.jsonData;
this.responseParser = new ResponseParser();
this.queryModel = new PostgresQuery({});
this.interval = (instanceSettings.jsonData || {}).timeInterval || '1m';
}
interpolateVariable = (value: string | string[], variable: { multi: any; includeAll: any }) => {
if (typeof value === 'string') {
if (variable.multi || variable.includeAll) {
return this.queryModel.quoteLiteral(value);
} else {
return value;
}
}
if (typeof value === 'number') {
return value;
}
const quotedValues = _.map(value, v => {
return this.queryModel.quoteLiteral(v);
});
return quotedValues.join(',');
};
interpolateVariablesInQueries(
queries: PostgresQueryForInterpolation[],
scopedVars: ScopedVars
): PostgresQueryForInterpolation[] {
let expandedQueries = queries;
if (queries && queries.length > 0) {
expandedQueries = queries.map(query => {
const expandedQuery = {
...query,
datasource: this.name,
rawSql: this.templateSrv.replace(query.rawSql, scopedVars, this.interpolateVariable),
rawQuery: true,
};
return expandedQuery;
});
}
return expandedQueries;
}
query(options: any): Observable<DataQueryResponse> {
const queries = _.filter(options.targets, target => {
return target.hide !== true;
}).map(target => {
const queryModel = new PostgresQuery(target, this.templateSrv, options.scopedVars);
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable),
format: target.format,
};
});
if (queries.length === 0) {
return of({ data: [] });
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map(this.responseParser.processQueryResult));
}
annotationQuery(options: any) {
if (!options.annotation.rawQuery) {
return Promise.reject({
message: 'Query missing in annotation definition',
});
}
const query = {
refId: options.annotation.name,
datasourceId: this.id,
rawSql: this.templateSrv.replace(options.annotation.rawQuery, options.scopedVars, this.interpolateVariable),
format: 'table',
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [query],
},
})
.pipe(map((data: any) => this.responseParser.transformAnnotationResponse(options, data)))
.toPromise();
}
metricFindQuery(
query: string,
optionalOptions: { variable?: any; searchFilter?: string }
): Promise<PostgresMetricFindValue[]> {
let refId = 'tempvar';
if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) {
refId = optionalOptions.variable.name;
}
const rawSql = this.templateSrv.replace(
query,
getSearchFilterScopedVar({ query, wildcardChar: '%', options: optionalOptions }),
this.interpolateVariable
);
const interpolatedQuery = {
refId: refId,
datasourceId: this.id,
rawSql,
format: 'table',
};
const range = this.timeSrv.timeRange();
const data = {
queries: [interpolatedQuery],
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: data,
})
.pipe(map((data: any) => this.responseParser.parseMetricFindQueryResult(refId, data)))
.toPromise();
}
getVersion() {
return this.metricFindQuery("SELECT current_setting('server_version_num')::int/100", {});
}
getTimescaleDBVersion() {
return this.metricFindQuery("SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'", {});
}
testDatasource() {
return this.metricFindQuery('SELECT 1', {})
.then((res: any) => {
return { status: 'success', message: 'Database Connection OK' };
})
.catch((err: any) => {
console.error(err);
if (err.data && err.data.message) {
return { status: 'error', message: err.data.message };
} else {
return { status: 'error', message: err.status };
}
});
}
targetContainsTemplate(target: any) {
let rawSql = '';
if (target.rawQuery) {
rawSql = target.rawSql;
} else {
const query = new PostgresQuery(target);
rawSql = query.buildQuery();
}
rawSql = rawSql.replace('$__', '');
return this.templateSrv.variableExists(rawSql);
}
} | import { map } from 'rxjs/operators';
import { getBackendSrv } from '@grafana/runtime'; | random_line_split |
gdocs.py | #!/usr/bin/env python
from exceptions import KeyError
import os
import requests
class GoogleDoc(object):
"""
A class for accessing a Google document as an object.
Includes the bits necessary for accessing the document and auth and such.
For example:
doc = {
"key": "123456abcdef",
"file_name": "my_google_doc",
"gid": "2"
}
g = GoogleDoc(**doc)
g.get_auth()
g.get_document()
Will download your google doc to data/file_name.format.
"""
# You can update these values with kwargs.
# In fact, you better pass a key or else it won't work!
key = None
file_format = 'xlsx'
file_name = 'copy'
gid = '0'
# You can change these with kwargs but it's not recommended.
spreadsheet_url = 'https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=%(key)s&exportFormat=%(format)s&gid=%(gid)s'
new_spreadsheet_url = 'https://docs.google.com/spreadsheets/d/%(key)s/export?format=%(format)s&id=%(key)s&gid=%(gid)s'
auth = None
email = os.environ.get('APPS_GOOGLE_EMAIL', None)
password = os.environ.get('APPS_GOOGLE_PASS', None)
scope = "https://spreadsheets.google.com/feeds/"
service = "wise"
session = "1"
def __init__(self, **kwargs):
"""
Because sometimes, just sometimes, you need to update the class when you instantiate it.
In this case, we need, minimally, a document key.
"""
if kwargs:
if kwargs.items():
for key, value in kwargs.items():
setattr(self, key, value)
def get_auth(self):
"""
Gets an authorization token and adds it to the class.
"""
data = {}
if not self.email or not self.password:
|
else:
data['Email'] = self.email
data['Passwd'] = self.password
data['scope'] = self.scope
data['service'] = self.service
data['session'] = self.session
r = requests.post("https://www.google.com/accounts/ClientLogin", data=data)
self.auth = r.content.split('\n')[2].split('Auth=')[1]
def get_document(self):
"""
Uses the authentication token to fetch a google doc.
"""
# Handle basically all the things that can go wrong.
if not self.auth:
raise KeyError("Error! You didn't get an auth token. Something very bad happened. File a bug?")
elif not self.key:
raise KeyError("Error! You forgot to pass a key to the class.")
else:
headers = {}
headers['Authorization'] = "GoogleLogin auth=%s" % self.auth
url_params = { 'key': self.key, 'format': self.file_format, 'gid': self.gid }
url = self.spreadsheet_url % url_params
r = requests.get(url, headers=headers)
if r.status_code != 200:
url = self.new_spreadsheet_url % url_params
r = requests.get(url, headers=headers)
if r.status_code != 200:
raise KeyError("Error! Your Google Doc does not exist.")
with open('data/%s.%s' % (self.file_name, self.file_format), 'wb') as writefile:
writefile.write(r.content)
| raise KeyError("Error! You're missing some variables. You need to export APPS_GOOGLE_EMAIL and APPS_GOOGLE_PASS.") | conditional_block |
gdocs.py | #!/usr/bin/env python
from exceptions import KeyError
import os
import requests
class GoogleDoc(object):
"""
A class for accessing a Google document as an object.
Includes the bits necessary for accessing the document and auth and such.
For example:
doc = {
"key": "123456abcdef",
"file_name": "my_google_doc",
"gid": "2"
}
g = GoogleDoc(**doc)
g.get_auth()
g.get_document()
Will download your google doc to data/file_name.format.
"""
# You can update these values with kwargs.
# In fact, you better pass a key or else it won't work!
key = None
file_format = 'xlsx'
file_name = 'copy'
gid = '0'
# You can change these with kwargs but it's not recommended.
spreadsheet_url = 'https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=%(key)s&exportFormat=%(format)s&gid=%(gid)s'
new_spreadsheet_url = 'https://docs.google.com/spreadsheets/d/%(key)s/export?format=%(format)s&id=%(key)s&gid=%(gid)s'
auth = None
email = os.environ.get('APPS_GOOGLE_EMAIL', None)
password = os.environ.get('APPS_GOOGLE_PASS', None)
scope = "https://spreadsheets.google.com/feeds/"
service = "wise"
session = "1"
def __init__(self, **kwargs):
"""
Because sometimes, just sometimes, you need to update the class when you instantiate it.
In this case, we need, minimally, a document key.
"""
if kwargs:
if kwargs.items():
for key, value in kwargs.items():
setattr(self, key, value)
def get_auth(self):
"""
Gets an authorization token and adds it to the class.
"""
data = {}
if not self.email or not self.password:
raise KeyError("Error! You're missing some variables. You need to export APPS_GOOGLE_EMAIL and APPS_GOOGLE_PASS.")
else:
data['Email'] = self.email
data['Passwd'] = self.password
data['scope'] = self.scope
data['service'] = self.service
data['session'] = self.session
r = requests.post("https://www.google.com/accounts/ClientLogin", data=data)
self.auth = r.content.split('\n')[2].split('Auth=')[1]
def get_document(self):
"""
Uses the authentication token to fetch a google doc.
"""
# Handle basically all the things that can go wrong.
if not self.auth:
raise KeyError("Error! You didn't get an auth token. Something very bad happened. File a bug?")
elif not self.key:
raise KeyError("Error! You forgot to pass a key to the class.")
else:
headers = {}
headers['Authorization'] = "GoogleLogin auth=%s" % self.auth
url_params = { 'key': self.key, 'format': self.file_format, 'gid': self.gid }
url = self.spreadsheet_url % url_params
r = requests.get(url, headers=headers)
|
if r.status_code != 200:
raise KeyError("Error! Your Google Doc does not exist.")
with open('data/%s.%s' % (self.file_name, self.file_format), 'wb') as writefile:
writefile.write(r.content) | if r.status_code != 200:
url = self.new_spreadsheet_url % url_params
r = requests.get(url, headers=headers) | random_line_split |
gdocs.py | #!/usr/bin/env python
from exceptions import KeyError
import os
import requests
class GoogleDoc(object):
| """
A class for accessing a Google document as an object.
Includes the bits necessary for accessing the document and auth and such.
For example:
doc = {
"key": "123456abcdef",
"file_name": "my_google_doc",
"gid": "2"
}
g = GoogleDoc(**doc)
g.get_auth()
g.get_document()
Will download your google doc to data/file_name.format.
"""
# You can update these values with kwargs.
# In fact, you better pass a key or else it won't work!
key = None
file_format = 'xlsx'
file_name = 'copy'
gid = '0'
# You can change these with kwargs but it's not recommended.
spreadsheet_url = 'https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=%(key)s&exportFormat=%(format)s&gid=%(gid)s'
new_spreadsheet_url = 'https://docs.google.com/spreadsheets/d/%(key)s/export?format=%(format)s&id=%(key)s&gid=%(gid)s'
auth = None
email = os.environ.get('APPS_GOOGLE_EMAIL', None)
password = os.environ.get('APPS_GOOGLE_PASS', None)
scope = "https://spreadsheets.google.com/feeds/"
service = "wise"
session = "1"
def __init__(self, **kwargs):
"""
Because sometimes, just sometimes, you need to update the class when you instantiate it.
In this case, we need, minimally, a document key.
"""
if kwargs:
if kwargs.items():
for key, value in kwargs.items():
setattr(self, key, value)
def get_auth(self):
"""
Gets an authorization token and adds it to the class.
"""
data = {}
if not self.email or not self.password:
raise KeyError("Error! You're missing some variables. You need to export APPS_GOOGLE_EMAIL and APPS_GOOGLE_PASS.")
else:
data['Email'] = self.email
data['Passwd'] = self.password
data['scope'] = self.scope
data['service'] = self.service
data['session'] = self.session
r = requests.post("https://www.google.com/accounts/ClientLogin", data=data)
self.auth = r.content.split('\n')[2].split('Auth=')[1]
def get_document(self):
"""
Uses the authentication token to fetch a google doc.
"""
# Handle basically all the things that can go wrong.
if not self.auth:
raise KeyError("Error! You didn't get an auth token. Something very bad happened. File a bug?")
elif not self.key:
raise KeyError("Error! You forgot to pass a key to the class.")
else:
headers = {}
headers['Authorization'] = "GoogleLogin auth=%s" % self.auth
url_params = { 'key': self.key, 'format': self.file_format, 'gid': self.gid }
url = self.spreadsheet_url % url_params
r = requests.get(url, headers=headers)
if r.status_code != 200:
url = self.new_spreadsheet_url % url_params
r = requests.get(url, headers=headers)
if r.status_code != 200:
raise KeyError("Error! Your Google Doc does not exist.")
with open('data/%s.%s' % (self.file_name, self.file_format), 'wb') as writefile:
writefile.write(r.content) | identifier_body |
|
gdocs.py | #!/usr/bin/env python
from exceptions import KeyError
import os
import requests
class | (object):
"""
A class for accessing a Google document as an object.
Includes the bits necessary for accessing the document and auth and such.
For example:
doc = {
"key": "123456abcdef",
"file_name": "my_google_doc",
"gid": "2"
}
g = GoogleDoc(**doc)
g.get_auth()
g.get_document()
Will download your google doc to data/file_name.format.
"""
# You can update these values with kwargs.
# In fact, you better pass a key or else it won't work!
key = None
file_format = 'xlsx'
file_name = 'copy'
gid = '0'
# You can change these with kwargs but it's not recommended.
spreadsheet_url = 'https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=%(key)s&exportFormat=%(format)s&gid=%(gid)s'
new_spreadsheet_url = 'https://docs.google.com/spreadsheets/d/%(key)s/export?format=%(format)s&id=%(key)s&gid=%(gid)s'
auth = None
email = os.environ.get('APPS_GOOGLE_EMAIL', None)
password = os.environ.get('APPS_GOOGLE_PASS', None)
scope = "https://spreadsheets.google.com/feeds/"
service = "wise"
session = "1"
def __init__(self, **kwargs):
"""
Because sometimes, just sometimes, you need to update the class when you instantiate it.
In this case, we need, minimally, a document key.
"""
if kwargs:
if kwargs.items():
for key, value in kwargs.items():
setattr(self, key, value)
def get_auth(self):
"""
Gets an authorization token and adds it to the class.
"""
data = {}
if not self.email or not self.password:
raise KeyError("Error! You're missing some variables. You need to export APPS_GOOGLE_EMAIL and APPS_GOOGLE_PASS.")
else:
data['Email'] = self.email
data['Passwd'] = self.password
data['scope'] = self.scope
data['service'] = self.service
data['session'] = self.session
r = requests.post("https://www.google.com/accounts/ClientLogin", data=data)
self.auth = r.content.split('\n')[2].split('Auth=')[1]
def get_document(self):
"""
Uses the authentication token to fetch a google doc.
"""
# Handle basically all the things that can go wrong.
if not self.auth:
raise KeyError("Error! You didn't get an auth token. Something very bad happened. File a bug?")
elif not self.key:
raise KeyError("Error! You forgot to pass a key to the class.")
else:
headers = {}
headers['Authorization'] = "GoogleLogin auth=%s" % self.auth
url_params = { 'key': self.key, 'format': self.file_format, 'gid': self.gid }
url = self.spreadsheet_url % url_params
r = requests.get(url, headers=headers)
if r.status_code != 200:
url = self.new_spreadsheet_url % url_params
r = requests.get(url, headers=headers)
if r.status_code != 200:
raise KeyError("Error! Your Google Doc does not exist.")
with open('data/%s.%s' % (self.file_name, self.file_format), 'wb') as writefile:
writefile.write(r.content)
| GoogleDoc | identifier_name |
mod.rs | extern crate combine;
use self::combine::*;
use self::combine::combinator::{Many, SepBy};
use self::combine::primitives::{Consumed, Stream};
use std::collections::HashMap;
#[derive(Debug, PartialEq, Eq)]
pub enum Object {
IntObject(i32),
Boolean(bool),
String(String),
VecObject(Vec<Object>),
StructVecObject(Vec<HashMap<String, Object>>),
RandomText(String),
}
pub type Section = HashMap<String, Object>;
pub type Sections = Vec<Section>;
fn title_parser(input: State<&str>) -> ParseResult<String, &str> {
between(token('['), token(']'), many1(alpha_num())).parse_state(input)
}
fn string_parser(input: State<&str>) -> ParseResult<String, &str> {
fn | (input: State<&str>) -> ParseResult<char, &str> {
let (c, input) = try!(any().parse_lazy(input));
let mut back_slash_char = satisfy(|c| "\"\\/bfnrt".chars().find(|x| *x == c).is_some()).map(|c| {
match c {
'"' => '"',
'\\' => '\\',
'/' => '/',
'b' => '\u{0008}',
'f' => '\u{000c}',
'n' => '\n',
'r' => '\r',
't' => '\t',
c => c//Should never happen
}
});
match c {
'\\' => input.combine(|input| back_slash_char.parse_state(input)),
'"' => Err(Consumed::Empty(ParseError::from_errors(input.into_inner().position, Vec::new()))),
_ => Ok((c, input))
}
}
optional(string("_("))
.with(between(char('"'),
char('"'),
many(parser(escaped_char_parser))
))
.skip(optional(char(')'))).parse_state(input)
}
fn boolean_parser(input : State<&str>) -> ParseResult<Object, &str> {
string("TRUE").map(|_| Object::Boolean(true)).or(string("FALSE").map(|_| Object::Boolean(false))).parse_state(input)
}
fn wierd_exception(input : State<&str>) -> ParseResult<Object, &str> {
string("$$").with(many1(letter())).map(|string : String| Object::RandomText(string)).parse_state(input)
}
fn single_object_parser(input : State<&str>) -> ParseResult<Object, &str> {
let integer_parser = spaces().with(many1(digit())).map(|string : String| Object::IntObject(string.parse::<i32>().unwrap()));
let string_object_parser = parser(string_parser).map(|string| Object::String(string));
integer_parser.or(parser(boolean_parser)).or(string_object_parser).or(parser(wierd_exception)).parse_state(input)
}
fn struct_parser(input: State<&str>) -> ParseResult<(Vec<String>, Vec<Vec<Object>>), &str> {
let comma_parser = spaces().with(char(',')).skip(spaces());
let title_parser = char('{').with(spaces()).with(sep_by(parser(string_parser), comma_parser.clone()));
let row_parser = many(spaces().with(sep_by(parser(single_object_parser), comma_parser)));
// fn create_map(tuple : (vec<String>, vec<vec<Object>>));
title_parser.and(row_parser).parse_state(input)
}
fn object_parser(input : State<&str>) -> ParseResult<Object, &str> {
unimplemented!()
}
fn assignment_parser(input : State<&str>) -> ParseResult<(String, Object), &str> {
unimplemented!()
}
fn section_parser(input : State<&str>) -> ParseResult<(String, HashMap<String, Object>), &str> {
unimplemented!()
}
pub fn sections_parser(input: State<&str>) -> ParseResult<Object, &str> {
unimplemented!()
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::fmt::Debug;
use super::combine::*;
use super::{Object};
use super::{assignment_parser, boolean_parser, object_parser, section_parser, sections_parser, single_object_parser, string_parser, struct_parser, title_parser, wierd_exception};
const true_object : Object = Object::Boolean(true);
fn test<A: Eq + Debug, F: Fn(State<&str>) -> ParseResult<A, &str>>(my_parser : F, input : &str, output : A) {
let result = parser(my_parser).parse(input);
assert!(result.is_ok());
match result {
Ok((result, rest)) => {
assert_eq!(result, output);
assert_eq!(rest, "");
},
_ => assert!(false)
}
}
#[test]
fn test_title_parser() {
test(title_parser, "[hello]", "hello".to_string());
}
#[test]
fn test_string_parser() {
test(string_parser, "\"hello \\\"world\\\"\"", "hello \"world\"".to_string());
}
#[test]
fn test_boolean_parser() {
test(boolean_parser, "TRUE", true_object);
}
#[test]
fn test_wierd_exception_parser() {
let wierd_object : Object = Object::RandomText("wierd".to_string());
test(wierd_exception, "$$wierd", wierd_object);
}
#[test]
fn test_single_object_parser() {
let wierd_object : Object = Object::RandomText("wierd".to_string());
test(single_object_parser, "123", Object::IntObject(123));
test(single_object_parser, "TRUE", true_object);
test(single_object_parser, "\"string\"", Object::String("string".to_string()));
test(single_object_parser, "$$wierd", wierd_object);
}
#[test]
fn test_struct_parser() {
test( struct_parser
, "{col1, col2
1, 2
\"hello\", \"world\"
TRUE, FALSE
}"
, ( vec!("col1".to_string(), "col2".to_string())
, vec!(vec!(Object::IntObject(1), Object::IntObject(2)),
vec!(Object::String("hello".to_string()), Object::String("world".to_string())),
vec!(true_object, Object::Boolean(false)))
)
)
}
#[test]
fn test_object_parser() {
test(object_parser,
"1, 2, 3",
Object::VecObject(vec!(Object::IntObject(1), Object::IntObject(2), Object::IntObject(3))));
}
#[test]
fn test_assignment_parser() {
test(assignment_parser,
"test = 1",
("test".to_string(), Object::IntObject(1)));
}
#[test]
fn test_section_parser() {
let mut hash_map = HashMap::new();
hash_map.insert("test1".to_string(), Object::IntObject(1));
hash_map.insert("test2".to_string(), Object::String("hello world".to_string()));
hash_map.insert("test3".to_string(), true_object);
test(section_parser,
"[test]
test1 = 1
test2 = \"hello world\"
test3 = TRUE",
("test".to_string(), hash_map));
}
}
| escaped_char_parser | identifier_name |
mod.rs | extern crate combine;
use self::combine::*;
use self::combine::combinator::{Many, SepBy};
use self::combine::primitives::{Consumed, Stream};
use std::collections::HashMap;
#[derive(Debug, PartialEq, Eq)]
pub enum Object { | IntObject(i32),
Boolean(bool),
String(String),
VecObject(Vec<Object>),
StructVecObject(Vec<HashMap<String, Object>>),
RandomText(String),
}
pub type Section = HashMap<String, Object>;
pub type Sections = Vec<Section>;
fn title_parser(input: State<&str>) -> ParseResult<String, &str> {
between(token('['), token(']'), many1(alpha_num())).parse_state(input)
}
fn string_parser(input: State<&str>) -> ParseResult<String, &str> {
fn escaped_char_parser(input: State<&str>) -> ParseResult<char, &str> {
let (c, input) = try!(any().parse_lazy(input));
let mut back_slash_char = satisfy(|c| "\"\\/bfnrt".chars().find(|x| *x == c).is_some()).map(|c| {
match c {
'"' => '"',
'\\' => '\\',
'/' => '/',
'b' => '\u{0008}',
'f' => '\u{000c}',
'n' => '\n',
'r' => '\r',
't' => '\t',
c => c//Should never happen
}
});
match c {
'\\' => input.combine(|input| back_slash_char.parse_state(input)),
'"' => Err(Consumed::Empty(ParseError::from_errors(input.into_inner().position, Vec::new()))),
_ => Ok((c, input))
}
}
optional(string("_("))
.with(between(char('"'),
char('"'),
many(parser(escaped_char_parser))
))
.skip(optional(char(')'))).parse_state(input)
}
fn boolean_parser(input : State<&str>) -> ParseResult<Object, &str> {
string("TRUE").map(|_| Object::Boolean(true)).or(string("FALSE").map(|_| Object::Boolean(false))).parse_state(input)
}
fn wierd_exception(input : State<&str>) -> ParseResult<Object, &str> {
string("$$").with(many1(letter())).map(|string : String| Object::RandomText(string)).parse_state(input)
}
fn single_object_parser(input : State<&str>) -> ParseResult<Object, &str> {
let integer_parser = spaces().with(many1(digit())).map(|string : String| Object::IntObject(string.parse::<i32>().unwrap()));
let string_object_parser = parser(string_parser).map(|string| Object::String(string));
integer_parser.or(parser(boolean_parser)).or(string_object_parser).or(parser(wierd_exception)).parse_state(input)
}
fn struct_parser(input: State<&str>) -> ParseResult<(Vec<String>, Vec<Vec<Object>>), &str> {
let comma_parser = spaces().with(char(',')).skip(spaces());
let title_parser = char('{').with(spaces()).with(sep_by(parser(string_parser), comma_parser.clone()));
let row_parser = many(spaces().with(sep_by(parser(single_object_parser), comma_parser)));
// fn create_map(tuple : (vec<String>, vec<vec<Object>>));
title_parser.and(row_parser).parse_state(input)
}
fn object_parser(input : State<&str>) -> ParseResult<Object, &str> {
unimplemented!()
}
fn assignment_parser(input : State<&str>) -> ParseResult<(String, Object), &str> {
unimplemented!()
}
fn section_parser(input : State<&str>) -> ParseResult<(String, HashMap<String, Object>), &str> {
unimplemented!()
}
pub fn sections_parser(input: State<&str>) -> ParseResult<Object, &str> {
unimplemented!()
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::fmt::Debug;
use super::combine::*;
use super::{Object};
use super::{assignment_parser, boolean_parser, object_parser, section_parser, sections_parser, single_object_parser, string_parser, struct_parser, title_parser, wierd_exception};
const true_object : Object = Object::Boolean(true);
fn test<A: Eq + Debug, F: Fn(State<&str>) -> ParseResult<A, &str>>(my_parser : F, input : &str, output : A) {
let result = parser(my_parser).parse(input);
assert!(result.is_ok());
match result {
Ok((result, rest)) => {
assert_eq!(result, output);
assert_eq!(rest, "");
},
_ => assert!(false)
}
}
#[test]
fn test_title_parser() {
test(title_parser, "[hello]", "hello".to_string());
}
#[test]
fn test_string_parser() {
test(string_parser, "\"hello \\\"world\\\"\"", "hello \"world\"".to_string());
}
#[test]
fn test_boolean_parser() {
test(boolean_parser, "TRUE", true_object);
}
#[test]
fn test_wierd_exception_parser() {
let wierd_object : Object = Object::RandomText("wierd".to_string());
test(wierd_exception, "$$wierd", wierd_object);
}
#[test]
fn test_single_object_parser() {
let wierd_object : Object = Object::RandomText("wierd".to_string());
test(single_object_parser, "123", Object::IntObject(123));
test(single_object_parser, "TRUE", true_object);
test(single_object_parser, "\"string\"", Object::String("string".to_string()));
test(single_object_parser, "$$wierd", wierd_object);
}
#[test]
fn test_struct_parser() {
test( struct_parser
, "{col1, col2
1, 2
\"hello\", \"world\"
TRUE, FALSE
}"
, ( vec!("col1".to_string(), "col2".to_string())
, vec!(vec!(Object::IntObject(1), Object::IntObject(2)),
vec!(Object::String("hello".to_string()), Object::String("world".to_string())),
vec!(true_object, Object::Boolean(false)))
)
)
}
#[test]
fn test_object_parser() {
test(object_parser,
"1, 2, 3",
Object::VecObject(vec!(Object::IntObject(1), Object::IntObject(2), Object::IntObject(3))));
}
#[test]
fn test_assignment_parser() {
test(assignment_parser,
"test = 1",
("test".to_string(), Object::IntObject(1)));
}
#[test]
fn test_section_parser() {
let mut hash_map = HashMap::new();
hash_map.insert("test1".to_string(), Object::IntObject(1));
hash_map.insert("test2".to_string(), Object::String("hello world".to_string()));
hash_map.insert("test3".to_string(), true_object);
test(section_parser,
"[test]
test1 = 1
test2 = \"hello world\"
test3 = TRUE",
("test".to_string(), hash_map));
}
} | random_line_split |
|
mod.rs | extern crate combine;
use self::combine::*;
use self::combine::combinator::{Many, SepBy};
use self::combine::primitives::{Consumed, Stream};
use std::collections::HashMap;
#[derive(Debug, PartialEq, Eq)]
pub enum Object {
IntObject(i32),
Boolean(bool),
String(String),
VecObject(Vec<Object>),
StructVecObject(Vec<HashMap<String, Object>>),
RandomText(String),
}
pub type Section = HashMap<String, Object>;
pub type Sections = Vec<Section>;
fn title_parser(input: State<&str>) -> ParseResult<String, &str> {
between(token('['), token(']'), many1(alpha_num())).parse_state(input)
}
fn string_parser(input: State<&str>) -> ParseResult<String, &str> {
fn escaped_char_parser(input: State<&str>) -> ParseResult<char, &str> {
let (c, input) = try!(any().parse_lazy(input));
let mut back_slash_char = satisfy(|c| "\"\\/bfnrt".chars().find(|x| *x == c).is_some()).map(|c| {
match c {
'"' => '"',
'\\' => '\\',
'/' => '/',
'b' => '\u{0008}',
'f' => '\u{000c}',
'n' => '\n',
'r' => '\r',
't' => '\t',
c => c//Should never happen
}
});
match c {
'\\' => input.combine(|input| back_slash_char.parse_state(input)),
'"' => Err(Consumed::Empty(ParseError::from_errors(input.into_inner().position, Vec::new()))),
_ => Ok((c, input))
}
}
optional(string("_("))
.with(between(char('"'),
char('"'),
many(parser(escaped_char_parser))
))
.skip(optional(char(')'))).parse_state(input)
}
fn boolean_parser(input : State<&str>) -> ParseResult<Object, &str> {
string("TRUE").map(|_| Object::Boolean(true)).or(string("FALSE").map(|_| Object::Boolean(false))).parse_state(input)
}
fn wierd_exception(input : State<&str>) -> ParseResult<Object, &str> {
string("$$").with(many1(letter())).map(|string : String| Object::RandomText(string)).parse_state(input)
}
fn single_object_parser(input : State<&str>) -> ParseResult<Object, &str> {
let integer_parser = spaces().with(many1(digit())).map(|string : String| Object::IntObject(string.parse::<i32>().unwrap()));
let string_object_parser = parser(string_parser).map(|string| Object::String(string));
integer_parser.or(parser(boolean_parser)).or(string_object_parser).or(parser(wierd_exception)).parse_state(input)
}
fn struct_parser(input: State<&str>) -> ParseResult<(Vec<String>, Vec<Vec<Object>>), &str> {
let comma_parser = spaces().with(char(',')).skip(spaces());
let title_parser = char('{').with(spaces()).with(sep_by(parser(string_parser), comma_parser.clone()));
let row_parser = many(spaces().with(sep_by(parser(single_object_parser), comma_parser)));
// fn create_map(tuple : (vec<String>, vec<vec<Object>>));
title_parser.and(row_parser).parse_state(input)
}
fn object_parser(input : State<&str>) -> ParseResult<Object, &str> {
unimplemented!()
}
fn assignment_parser(input : State<&str>) -> ParseResult<(String, Object), &str> {
unimplemented!()
}
fn section_parser(input : State<&str>) -> ParseResult<(String, HashMap<String, Object>), &str> {
unimplemented!()
}
pub fn sections_parser(input: State<&str>) -> ParseResult<Object, &str> {
unimplemented!()
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::fmt::Debug;
use super::combine::*;
use super::{Object};
use super::{assignment_parser, boolean_parser, object_parser, section_parser, sections_parser, single_object_parser, string_parser, struct_parser, title_parser, wierd_exception};
const true_object : Object = Object::Boolean(true);
fn test<A: Eq + Debug, F: Fn(State<&str>) -> ParseResult<A, &str>>(my_parser : F, input : &str, output : A) {
let result = parser(my_parser).parse(input);
assert!(result.is_ok());
match result {
Ok((result, rest)) => {
assert_eq!(result, output);
assert_eq!(rest, "");
},
_ => assert!(false)
}
}
#[test]
fn test_title_parser() {
test(title_parser, "[hello]", "hello".to_string());
}
#[test]
fn test_string_parser() |
#[test]
fn test_boolean_parser() {
test(boolean_parser, "TRUE", true_object);
}
#[test]
fn test_wierd_exception_parser() {
let wierd_object : Object = Object::RandomText("wierd".to_string());
test(wierd_exception, "$$wierd", wierd_object);
}
#[test]
fn test_single_object_parser() {
let wierd_object : Object = Object::RandomText("wierd".to_string());
test(single_object_parser, "123", Object::IntObject(123));
test(single_object_parser, "TRUE", true_object);
test(single_object_parser, "\"string\"", Object::String("string".to_string()));
test(single_object_parser, "$$wierd", wierd_object);
}
#[test]
fn test_struct_parser() {
test( struct_parser
, "{col1, col2
1, 2
\"hello\", \"world\"
TRUE, FALSE
}"
, ( vec!("col1".to_string(), "col2".to_string())
, vec!(vec!(Object::IntObject(1), Object::IntObject(2)),
vec!(Object::String("hello".to_string()), Object::String("world".to_string())),
vec!(true_object, Object::Boolean(false)))
)
)
}
#[test]
fn test_object_parser() {
test(object_parser,
"1, 2, 3",
Object::VecObject(vec!(Object::IntObject(1), Object::IntObject(2), Object::IntObject(3))));
}
#[test]
fn test_assignment_parser() {
test(assignment_parser,
"test = 1",
("test".to_string(), Object::IntObject(1)));
}
#[test]
fn test_section_parser() {
let mut hash_map = HashMap::new();
hash_map.insert("test1".to_string(), Object::IntObject(1));
hash_map.insert("test2".to_string(), Object::String("hello world".to_string()));
hash_map.insert("test3".to_string(), true_object);
test(section_parser,
"[test]
test1 = 1
test2 = \"hello world\"
test3 = TRUE",
("test".to_string(), hash_map));
}
}
| {
test(string_parser, "\"hello \\\"world\\\"\"", "hello \"world\"".to_string());
} | identifier_body |
leap.js | /**
* Copyright (c) 2013, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* https://raw.github.com/facebook/regenerator/master/LICENSE file. An
* additional grant of patent rights can be found in the PATENTS file in
* the same directory.
*/
var assert = require("assert");
var types = require("ast-types");
var n = types.namedTypes;
var b = types.builders;
var inherits = require("util").inherits;
function Entry() {
assert.ok(this instanceof Entry);
}
function FunctionEntry(returnLoc) {
Entry.call(this);
n.Literal.assert(returnLoc);
Object.defineProperties(this, {
returnLoc: { value: returnLoc }
});
}
inherits(FunctionEntry, Entry);
exports.FunctionEntry = FunctionEntry;
function LoopEntry(breakLoc, continueLoc, label) {
Entry.call(this);
n.Literal.assert(breakLoc);
n.Literal.assert(continueLoc);
if (label) {
n.Identifier.assert(label);
} else {
label = null;
}
Object.defineProperties(this, {
breakLoc: { value: breakLoc },
continueLoc: { value: continueLoc },
label: { value: label }
});
}
inherits(LoopEntry, Entry);
exports.LoopEntry = LoopEntry;
function SwitchEntry(breakLoc) {
Entry.call(this);
n.Literal.assert(breakLoc);
Object.defineProperties(this, {
breakLoc: { value: breakLoc }
});
}
inherits(SwitchEntry, Entry);
exports.SwitchEntry = SwitchEntry;
function TryEntry(catchEntry, finallyEntry) {
Entry.call(this);
if (catchEntry) {
assert.ok(catchEntry instanceof CatchEntry);
} else {
catchEntry = null;
}
if (finallyEntry) {
assert.ok(finallyEntry instanceof FinallyEntry);
} else {
finallyEntry = null;
}
Object.defineProperties(this, {
catchEntry: { value: catchEntry },
finallyEntry: { value: finallyEntry }
});
}
inherits(TryEntry, Entry);
exports.TryEntry = TryEntry;
function CatchEntry(firstLoc, paramId) {
Entry.call(this);
n.Literal.assert(firstLoc);
n.Identifier.assert(paramId);
Object.defineProperties(this, {
firstLoc: { value: firstLoc },
paramId: { value: paramId }
});
}
inherits(CatchEntry, Entry);
exports.CatchEntry = CatchEntry;
function FinallyEntry(firstLoc, nextLocTempVar) {
Entry.call(this);
n.Literal.assert(firstLoc);
n.Identifier.assert(nextLocTempVar);
Object.defineProperties(this, {
firstLoc: { value: firstLoc },
nextLocTempVar: { value: nextLocTempVar }
});
}
inherits(FinallyEntry, Entry);
exports.FinallyEntry = FinallyEntry;
function LeapManager(emitter) {
assert.ok(this instanceof LeapManager);
var Emitter = require("./emit").Emitter;
assert.ok(emitter instanceof Emitter);
Object.defineProperties(this, { | value: [new FunctionEntry(emitter.finalLoc)]
}
});
}
var LMp = LeapManager.prototype;
exports.LeapManager = LeapManager;
LMp.withEntry = function(entry, callback) {
assert.ok(entry instanceof Entry);
this.entryStack.push(entry);
try {
callback.call(this.emitter);
} finally {
var popped = this.entryStack.pop();
assert.strictEqual(popped, entry);
}
};
LMp._leapToEntry = function(predicate, defaultLoc) {
var entry, loc;
var finallyEntries = [];
var skipNextTryEntry = null;
for (var i = this.entryStack.length - 1; i >= 0; --i) {
entry = this.entryStack[i];
if (entry instanceof CatchEntry ||
entry instanceof FinallyEntry) {
// If we are inside of a catch or finally block, then we must
// have exited the try block already, so we shouldn't consider
// the next TryStatement as a handler for this throw.
skipNextTryEntry = entry;
} else if (entry instanceof TryEntry) {
if (skipNextTryEntry) {
// If an exception was thrown from inside a catch block and this
// try statement has a finally block, make sure we execute that
// finally block.
if (skipNextTryEntry instanceof CatchEntry &&
entry.finallyEntry) {
finallyEntries.push(entry.finallyEntry);
}
skipNextTryEntry = null;
} else if ((loc = predicate.call(this, entry))) {
break;
} else if (entry.finallyEntry) {
finallyEntries.push(entry.finallyEntry);
}
} else if ((loc = predicate.call(this, entry))) {
break;
}
}
if (loc) {
// fall through
} else if (defaultLoc) {
loc = defaultLoc;
} else {
return null;
}
n.Literal.assert(loc);
var finallyEntry;
while ((finallyEntry = finallyEntries.pop())) {
this.emitter.emitAssign(finallyEntry.nextLocTempVar, loc);
loc = finallyEntry.firstLoc;
}
return loc;
};
function getLeapLocation(entry, property, label) {
var loc = entry[property];
if (loc) {
if (label) {
if (entry.label &&
entry.label.name === label.name) {
return loc;
}
} else {
return loc;
}
}
return null;
}
LMp.emitBreak = function(label) {
var loc = this._leapToEntry(function(entry) {
return getLeapLocation(entry, "breakLoc", label);
});
if (loc === null) {
throw new Error("illegal break statement");
}
this.emitter.clearPendingException();
this.emitter.jump(loc);
};
LMp.emitContinue = function(label) {
var loc = this._leapToEntry(function(entry) {
return getLeapLocation(entry, "continueLoc", label);
});
if (loc === null) {
throw new Error("illegal continue statement");
}
this.emitter.clearPendingException();
this.emitter.jump(loc);
}; | emitter: { value: emitter },
entryStack: { | random_line_split |
Subsets and Splits