code
stringlengths 72
362k
| identifier
stringlengths 3
50
⌀ | lang
stringclasses 1
value | repository
stringclasses 36
values |
---|---|---|---|
package org.elasticsearch.http;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.rest.RestStatus;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class TestHttpResponse implements HttpResponse {
private final RestStatus [MASK];
private final BytesReference content;
private final Map<String, List<String>> headers = new HashMap<>();
TestHttpResponse(RestStatus [MASK], BytesReference content) {
this.[MASK] = [MASK];
this.content = content;
}
public BytesReference content() {
return content;
}
public RestStatus [MASK]() {
return [MASK];
}
public Map<String, List<String>> headers() {
return headers;
}
@Override
public void addHeader(String name, String value) {
if (headers.containsKey(name) == false) {
ArrayList<String> values = new ArrayList<>();
values.add(value);
headers.put(name, values);
} else {
headers.get(name).add(value);
}
}
@Override
public boolean containsHeader(String name) {
return headers.containsKey(name);
}
} | status | java | elasticsearch |
package org.elasticsearch.xpack.inference.services.azureaistudio.request;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.inference.services.azureaistudio.AzureAiStudioEndpointType;
import java.io.IOException;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
public class AzureAiStudioChatCompletionRequestEntityTests extends ESTestCase {
public void testToXContent_WhenTokenEndpoint_NoParameters() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.TOKEN,
null,
null,
null,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedTokenEndpointRequest(List.of("abc"), null, null, null, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenTokenEndpoint_WithTemperatureParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.TOKEN,
1.0,
null,
null,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedTokenEndpointRequest(List.of("abc"), 1.0, null, null, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenTokenEndpoint_WithTopPParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.TOKEN,
null,
2.0,
null,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedTokenEndpointRequest(List.of("abc"), null, 2.0, null, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenTokenEndpoint_WithDoSampleParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.TOKEN,
null,
null,
true,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedTokenEndpointRequest(List.of("abc"), null, null, true, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenTokenEndpoint_WithMaxNewTokensParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.TOKEN,
null,
null,
null,
512,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedTokenEndpointRequest(List.of("abc"), null, null, null, 512);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenRealtimeEndpoint_NoParameters() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.REALTIME,
null,
null,
null,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedRealtimeEndpointRequest(List.of("abc"), null, null, null, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenRealtimeEndpoint_WithTemperatureParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.REALTIME,
1.0,
null,
null,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedRealtimeEndpointRequest(List.of("abc"), 1.0, null, null, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenRealtimeEndpoint_WithTopPParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.REALTIME,
null,
2.0,
null,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedRealtimeEndpointRequest(List.of("abc"), null, 2.0, null, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenRealtimeEndpoint_WithDoSampleParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.REALTIME,
null,
null,
true,
null,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedRealtimeEndpointRequest(List.of("abc"), null, null, true, null);
assertThat(request, is(expectedRequest));
}
public void testToXContent_WhenRealtimeEndpoint_WithMaxNewTokensParam() throws IOException {
var entity = new AzureAiStudioChatCompletionRequestEntity(
List.of("abc"),
AzureAiStudioEndpointType.REALTIME,
null,
null,
null,
512,
false
);
var request = getXContentAsString(entity);
var expectedRequest = getExpectedRealtimeEndpointRequest(List.of("abc"), null, null, null, 512);
assertThat(request, is(expectedRequest));
}
private String getXContentAsString(AzureAiStudioChatCompletionRequestEntity entity) throws IOException {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
return Strings.toString(builder);
}
private String getExpectedTokenEndpointRequest(
List<String> inputs,
@Nullable Double temperature,
@Nullable Double topP,
@Nullable Boolean doSample,
@Nullable Integer [MASK]
) {
String expected = "{";
expected = addMessageInputs("messages", expected, inputs);
expected = addParameters(expected, temperature, topP, doSample, [MASK]);
expected += "}";
return expected;
}
private String getExpectedRealtimeEndpointRequest(
List<String> inputs,
@Nullable Double temperature,
@Nullable Double topP,
@Nullable Boolean doSample,
@Nullable Integer [MASK]
) {
String expected = "{\"input_data\":{";
expected = addMessageInputs("input_string", expected, inputs);
expected = addParameters(expected, temperature, topP, doSample, [MASK]);
expected += "}}";
return expected;
}
private String addMessageInputs(String fieldName, String expected, List<String> inputs) {
StringBuilder messages = new StringBuilder(Strings.format("\"%s\":[", fieldName));
var hasOne = false;
for (String input : inputs) {
if (hasOne) {
messages.append(",");
}
messages.append(getMessageString(input));
hasOne = true;
}
messages.append("]");
return expected + messages;
}
private String getMessageString(String input) {
return Strings.format("{\"content\":\"%s\",\"role\":\"user\"}", input);
}
private String addParameters(String expected, Double temperature, Double topP, Boolean doSample, Integer [MASK]) {
if (temperature == null && topP == null && doSample == null && [MASK] == null) {
return expected;
}
StringBuilder parameters = new StringBuilder(",\"parameters\":{");
var hasOne = false;
if (temperature != null) {
parameters.append(Strings.format("\"temperature\":%.1f", temperature));
hasOne = true;
}
if (topP != null) {
if (hasOne) {
parameters.append(",");
}
parameters.append(Strings.format("\"top_p\":%.1f", topP));
hasOne = true;
}
if (doSample != null) {
if (hasOne) {
parameters.append(",");
}
parameters.append(Strings.format("\"do_sample\":%s", doSample.equals(Boolean.TRUE)));
hasOne = true;
}
if ([MASK] != null) {
if (hasOne) {
parameters.append(",");
}
parameters.append(Strings.format("\"max_new_tokens\":%d", [MASK]));
}
parameters.append("}");
return expected + parameters;
}
} | maxNewTokens | java | elasticsearch |
package com.google.common.eventbus.outside;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.Lists;
import com.google.common.eventbus.Subscribe;
import com.google.common.eventbus.outside.NeitherAbstractNorAnnotatedInSuperclassTest.SubClass;
import java.util.List;
public class NeitherAbstractNorAnnotatedInSuperclassTest extends AbstractEventBusTest<SubClass> {
static class SuperClass {
final List<Object> neitherOverriddenNorAnnotatedEvents = Lists.newArrayList();
final List<Object> overriddenInSubclassNowhereAnnotatedEvents = Lists.newArrayList();
final List<Object> [MASK] = Lists.newArrayList();
public void neitherOverriddenNorAnnotated(Object o) {
neitherOverriddenNorAnnotatedEvents.add(o);
}
public void overriddenInSubclassNowhereAnnotated(Object o) {
overriddenInSubclassNowhereAnnotatedEvents.add(o);
}
public void overriddenAndAnnotatedInSubclass(Object o) {
[MASK].add(o);
}
}
static class SubClass extends SuperClass {
@Override
@SuppressWarnings("RedundantOverride")
public void overriddenInSubclassNowhereAnnotated(Object o) {
super.overriddenInSubclassNowhereAnnotated(o);
}
@Subscribe
@Override
public void overriddenAndAnnotatedInSubclass(Object o) {
super.overriddenAndAnnotatedInSubclass(o);
}
}
public void testNeitherOverriddenNorAnnotated() {
assertThat(getSubscriber().neitherOverriddenNorAnnotatedEvents).isEmpty();
}
public void testOverriddenInSubclassNowhereAnnotated() {
assertThat(getSubscriber().overriddenInSubclassNowhereAnnotatedEvents).isEmpty();
}
public void testOverriddenAndAnnotatedInSubclass() {
assertThat(getSubscriber().[MASK]).contains(EVENT);
}
@Override
SubClass createSubscriber() {
return new SubClass();
}
} | overriddenAndAnnotatedInSubclassEvents | java | guava |
package org.elasticsearch.xpack.ml.rest.dataframe;
import org.elasticsearch.[MASK].internal.node.NodeClient;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.Scope;
import org.elasticsearch.rest.ServerlessScope;
import org.elasticsearch.rest.action.RestCancellableNodeClient;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.ml.action.EvaluateDataFrameAction;
import java.io.IOException;
import java.util.List;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH;
@ServerlessScope(Scope.PUBLIC)
public class RestEvaluateDataFrameAction extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(POST, BASE_PATH + "data_frame/_evaluate"));
}
@Override
public String getName() {
return "ml_evaluate_data_frame_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient [MASK]) throws IOException {
EvaluateDataFrameAction.Request request = EvaluateDataFrameAction.Request.parseRequest(restRequest.contentOrSourceParamParser());
return channel -> new RestCancellableNodeClient([MASK], restRequest.getHttpChannel()).execute(
EvaluateDataFrameAction.INSTANCE,
request,
new RestToXContentListener<>(channel)
);
}
} | client | java | elasticsearch |
package org.elasticsearch.index.fielddata;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.script.field.DelegateDocValuesField;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.FieldMaskingReader;
import static org.hamcrest.Matchers.equalTo;
public class FieldDataCacheTests extends ESTestCase {
private static final ToScriptFieldFactory<SortedSetDocValues> MOCK_TO_SCRIPT_FIELD = (dv, n) -> new DelegateDocValuesField(
new ScriptDocValues.Strings(new ScriptDocValues.StringsSupplier(FieldData.toString(dv))),
n
);
public void testLoadGlobal_neverCacheIfFieldIsMissing() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(null);
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter iw = new IndexWriter(dir, iwc);
long numDocs = scaledRandomIntBetween(32, 128);
for (int i = 1; i <= numDocs; i++) {
Document doc = new Document();
doc.add(new SortedSetDocValuesField("field1", new BytesRef(String.valueOf(i))));
doc.add(new StringField("field2", String.valueOf(i), Field.Store.NO));
iw.addDocument(doc);
if (i % 24 == 0) {
iw.commit();
}
}
iw.close();
DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", "_na_", 0));
DummyAccountingFieldDataCache fieldDataCache = new DummyAccountingFieldDataCache();
SortedSetOrdinalsIndexFieldData sortedSetOrdinalsIndexFieldData = createSortedDV("field1", fieldDataCache);
sortedSetOrdinalsIndexFieldData.loadGlobal(ir);
assertThat(fieldDataCache.cachedGlobally, equalTo(1));
sortedSetOrdinalsIndexFieldData.loadGlobal(new FieldMaskingReader("field1", ir));
assertThat(fieldDataCache.cachedGlobally, equalTo(1));
PagedBytesIndexFieldData pagedBytesIndexFieldData = createPagedBytes("field2", fieldDataCache);
pagedBytesIndexFieldData.loadGlobal(ir);
assertThat(fieldDataCache.cachedGlobally, equalTo(2));
pagedBytesIndexFieldData.loadGlobal(new FieldMaskingReader("field2", ir));
assertThat(fieldDataCache.cachedGlobally, equalTo(2));
ir.close();
dir.close();
}
public void testGlobalOrdinalsCircuitBreaker() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(null);
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter iw = new IndexWriter(dir, iwc);
long numDocs = randomIntBetween(66000, 70000);
for (int i = 1; i <= numDocs; i++) {
Document doc = new Document();
doc.add(new SortedSetDocValuesField("field1", new BytesRef(String.valueOf(i))));
iw.addDocument(doc);
if (i % 10000 == 0) {
iw.commit();
}
}
iw.close();
DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", "_na_", 0));
int[] timesCalled = new int[1];
SortedSetOrdinalsIndexFieldData sortedSetOrdinalsIndexFieldData = new SortedSetOrdinalsIndexFieldData(
new DummyAccountingFieldDataCache(),
"field1",
CoreValuesSourceType.KEYWORD,
new NoneCircuitBreakerService() {
@Override
public CircuitBreaker getBreaker(String name) {
assertThat(name, equalTo(CircuitBreaker.FIELDDATA));
return new NoopCircuitBreaker("test") {
@Override
public void addEstimateBytesAndMaybeBreak(long bytes, String [MASK]) throws CircuitBreakingException {
assertThat([MASK], equalTo("Global Ordinals"));
assertThat(bytes, equalTo(0L));
timesCalled[0]++;
}
};
}
},
MOCK_TO_SCRIPT_FIELD
);
sortedSetOrdinalsIndexFieldData.loadGlobal(ir);
assertThat(timesCalled[0], equalTo(2));
ir.close();
dir.close();
}
private SortedSetOrdinalsIndexFieldData createSortedDV(String fieldName, IndexFieldDataCache indexFieldDataCache) {
return new SortedSetOrdinalsIndexFieldData(
indexFieldDataCache,
fieldName,
CoreValuesSourceType.KEYWORD,
new NoneCircuitBreakerService(),
MOCK_TO_SCRIPT_FIELD
);
}
private PagedBytesIndexFieldData createPagedBytes(String fieldName, IndexFieldDataCache indexFieldDataCache) {
return new PagedBytesIndexFieldData(
fieldName,
CoreValuesSourceType.KEYWORD,
indexFieldDataCache,
new NoneCircuitBreakerService(),
TextFieldMapper.Defaults.FIELDDATA_MIN_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MAX_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MIN_SEGMENT_SIZE,
MOCK_TO_SCRIPT_FIELD
);
}
private class DummyAccountingFieldDataCache implements IndexFieldDataCache {
private int cachedGlobally = 0;
@Override
public <FD extends LeafFieldData, IFD extends IndexFieldData<FD>> FD load(LeafReaderContext context, IFD indexFieldData)
throws Exception {
return indexFieldData.loadDirect(context);
}
@Override
@SuppressWarnings("unchecked")
public <FD extends LeafFieldData, IFD extends IndexFieldData.Global<FD>> IFD load(DirectoryReader indexReader, IFD indexFieldData)
throws Exception {
cachedGlobally++;
return (IFD) indexFieldData.loadGlobalDirect(indexReader);
}
@Override
public void clear() {}
@Override
public void clear(String fieldName) {}
}
} | label | java | elasticsearch |
package org.elasticsearch.datastreams.options.action;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.TimeValue;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
public class DeleteDataStreamOptionsAction {
public static final ActionType<AcknowledgedResponse> INSTANCE = new ActionType<>("indices:admin/data_stream/options/delete");
private DeleteDataStreamOptionsAction() {}
public static final class Request extends AcknowledgedRequest<Request> implements IndicesRequest.Replaceable {
private String[] [MASK];
private IndicesOptions indicesOptions = IndicesOptions.builder()
.concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS)
.wildcardOptions(
IndicesOptions.WildcardOptions.builder().matchOpen(true).matchClosed(true).allowEmptyExpressions(true).resolveAliases(false)
)
.gatekeeperOptions(
IndicesOptions.GatekeeperOptions.builder().allowAliasToMultipleIndices(false).allowClosedIndices(true).allowSelectors(false)
)
.build();
public Request(StreamInput in) throws IOException {
super(in);
this.[MASK] = in.readOptionalStringArray();
this.indicesOptions = IndicesOptions.readIndicesOptions(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalStringArray([MASK]);
indicesOptions.writeIndicesOptions(out);
}
public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String[] [MASK]) {
super(masterNodeTimeout, ackTimeout);
this.[MASK] = [MASK];
}
public String[] getNames() {
return [MASK];
}
@Override
public String[] indices() {
return [MASK];
}
@Override
public IndicesOptions indicesOptions() {
return indicesOptions;
}
public Request indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
@Override
public boolean includeDataStreams() {
return true;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Arrays.equals([MASK], request.[MASK]) && Objects.equals(indicesOptions, request.indicesOptions);
}
@Override
public int hashCode() {
int result = Objects.hash(indicesOptions);
result = 31 * result + Arrays.hashCode([MASK]);
return result;
}
@Override
public IndicesRequest indices(String... indices) {
this.[MASK] = indices;
return this;
}
}
} | names | java | elasticsearch |
package android.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
public final class Log {
public static final int VERBOSE = 2;
public static final int DEBUG = 3;
public static final int INFO = 4;
public static final int WARN = 5;
public static final int ERROR = 6;
public static final int ASSERT = 7;
private static Logger globalLogger = Logger.getGlobal();
private static Map<String, Integer> tagLevels = new HashMap<String, Integer>();
private static final String LOG_TAG_PREFIX = "log.tag.";
static {
globalLogger.setLevel(Level.ALL);
InputStream input = Log.class.getResourceAsStream("/data/local.prop");
if (input != null) {
try {
Properties props = new Properties();
props.load(input);
for (Map.Entry<Object,Object> entry : props.entrySet()) {
String key = (String) entry.getKey();
if (key.startsWith(LOG_TAG_PREFIX)) {
String tag = key.substring(LOG_TAG_PREFIX.length());
String value = (String) entry.getValue();
int level;
if ("ASSERT".equals(value)) {
level = ASSERT;
} else if ("DEBUG".equals(value)) {
level = DEBUG;
} else if ("ERROR".equals(value)) {
level = ERROR;
} else if ("INFO".equals(value)) {
level = INFO;
} else if ("VERBOSE".equals(value)) {
level = VERBOSE;
} else if ("WARN".equals(value)) {
level = WARN;
} else {
level = 0;
}
tagLevels.put(tag, Integer.valueOf(level));
}
}
} catch (IOException e) {
globalLogger.log(Level.WARNING, "failed parsing /data/local.prop", e);
}
}
}
private static class TerribleFailure extends Exception {
TerribleFailure(String [MASK], Throwable cause) { super([MASK], cause); }
}
public interface TerribleFailureHandler {
void onTerribleFailure(String tag, TerribleFailure what);
}
private static TerribleFailureHandler sWtfHandler = new TerribleFailureHandler() {
public void onTerribleFailure(String tag, TerribleFailure what) {
globalLogger.log(Level.SEVERE, tag, what);
}
};
private Log() {
}
public static int v(String tag, String [MASK]) {
return println_native(LOG_ID_MAIN, VERBOSE, tag, [MASK]);
}
public static int v(String tag, String [MASK], Throwable tr) {
return println_native(LOG_ID_MAIN, VERBOSE, tag, [MASK] + '\n' + getStackTraceString(tr));
}
public static int d(String tag, String [MASK]) {
return println_native(LOG_ID_MAIN, DEBUG, tag, [MASK]);
}
public static int d(String tag, String [MASK], Throwable tr) {
return println_native(LOG_ID_MAIN, DEBUG, tag, [MASK] + '\n' + getStackTraceString(tr));
}
public static int i(String tag, String [MASK]) {
return println_native(LOG_ID_MAIN, INFO, tag, [MASK]);
}
public static int i(String tag, String [MASK], Throwable tr) {
return println_native(LOG_ID_MAIN, INFO, tag, [MASK] + '\n' + getStackTraceString(tr));
}
public static int w(String tag, String [MASK]) {
return println_native(LOG_ID_MAIN, WARN, tag, [MASK]);
}
public static int w(String tag, String [MASK], Throwable tr) {
return println_native(LOG_ID_MAIN, WARN, tag, [MASK] + '\n' + getStackTraceString(tr));
}
public static boolean isLoggable(String tag, int level) {
Integer minimumLevel = tagLevels.get(tag);
if (minimumLevel != null) {
return level > minimumLevel.intValue();
}
return true;
}
public static int w(String tag, Throwable tr) {
return println_native(LOG_ID_MAIN, WARN, tag, getStackTraceString(tr));
}
public static int e(String tag, String [MASK]) {
return println_native(LOG_ID_MAIN, ERROR, tag, [MASK]);
}
public static int e(String tag, String [MASK], Throwable tr) {
return println_native(LOG_ID_MAIN, ERROR, tag, [MASK] + '\n' + getStackTraceString(tr));
}
public static int wtf(String tag, String [MASK]) {
return wtf(LOG_ID_MAIN, tag, [MASK], null, false);
}
public static int wtfStack(String tag, String [MASK]) {
return wtf(LOG_ID_MAIN, tag, [MASK], null, true);
}
public static int wtf(String tag, Throwable tr) {
return wtf(LOG_ID_MAIN, tag, tr.getMessage(), tr, false);
}
public static int wtf(String tag, String [MASK], Throwable tr) {
return wtf(LOG_ID_MAIN, tag, [MASK], tr, false);
}
static int wtf(int logId, String tag, String [MASK], Throwable tr, boolean localStack) {
TerribleFailure what = new TerribleFailure([MASK], tr);
int bytes = println_native(logId, ASSERT, tag, [MASK] + '\n'
+ getStackTraceString(localStack ? what : tr));
sWtfHandler.onTerribleFailure(tag, what);
return bytes;
}
public static TerribleFailureHandler setWtfHandler(TerribleFailureHandler handler) {
if (handler == null) {
throw new NullPointerException("handler == null");
}
TerribleFailureHandler oldHandler = sWtfHandler;
sWtfHandler = handler;
return oldHandler;
}
public static String getStackTraceString(Throwable tr) {
if (tr == null) {
return "";
}
Throwable t = tr;
while (t != null) {
if (t instanceof UnknownHostException) {
return "";
}
t = t.getCause();
}
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw, false);
tr.printStackTrace(pw);
pw.flush();
return sw.toString();
}
public static int println(int priority, String tag, String [MASK]) {
return println_native(LOG_ID_MAIN, priority, tag, [MASK]);
}
public static final int LOG_ID_MAIN = 0;
public static final int LOG_ID_RADIO = 1;
public static final int LOG_ID_EVENTS = 2;
public static final int LOG_ID_SYSTEM = 3;
public static int println_native(int bufID,
int priority, String tag, String [MASK]) {
String logMessage = String.format("%s: %s", tag, [MASK]);
globalLogger.log(priorityToLevel(priority), logMessage);
return logMessage.length();
}
private static Level priorityToLevel(int priority) {
switch (priority) {
case ASSERT:
case ERROR: return Level.SEVERE;
case WARN: return Level.WARNING;
case INFO: return Level.INFO;
case DEBUG: return Level.FINE;
case VERBOSE: return Level.FINER;
default:
return Level.FINEST;
}
}
} | msg | java | j2objc |
package org.elasticsearch.xpack.core.security.action.user;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.nullValue;
public class QueryUserRequestTests extends ESTestCase {
public void testValidate() {
final QueryUserRequest request1 = new QueryUserRequest(
null,
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(0, Integer.MAX_VALUE),
null,
null,
false
);
assertThat(request1.validate(), nullValue());
final QueryUserRequest [MASK] = new QueryUserRequest(
null,
randomIntBetween(Integer.MIN_VALUE, -1),
randomIntBetween(0, Integer.MAX_VALUE),
null,
null,
false
);
assertThat([MASK].validate().getMessage(), containsString("[from] parameter cannot be negative"));
final QueryUserRequest request3 = new QueryUserRequest(
null,
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(Integer.MIN_VALUE, -1),
null,
null,
false
);
assertThat(request3.validate().getMessage(), containsString("[size] parameter cannot be negative"));
}
} | request2 | java | elasticsearch |
package org.greenrobot.greendao.query;
import android.database.Cursor;
import org.greenrobot.greendao.AbstractDao;
import java.util.Date;
public class CursorQuery<T> extends AbstractQueryWithLimit<T> {
private final static class QueryData<T2> extends AbstractQueryData<T2, CursorQuery<T2>> {
private final int limitPosition;
private final int offsetPosition;
QueryData(AbstractDao dao, String [MASK], String[] initialValues, int limitPosition, int offsetPosition) {
super(dao, [MASK], initialValues);
this.limitPosition = limitPosition;
this.offsetPosition = offsetPosition;
}
@Override
protected CursorQuery<T2> createQuery() {
return new CursorQuery<T2>(this, dao, [MASK], initialValues.clone(), limitPosition, offsetPosition);
}
}
public static <T2> CursorQuery<T2> internalCreate(AbstractDao<T2, ?> dao, String [MASK], Object[] initialValues) {
return create(dao, [MASK], initialValues, -1, -1);
}
static <T2> CursorQuery<T2> create(AbstractDao<T2, ?> dao, String [MASK], Object[] initialValues, int limitPosition,
int offsetPosition) {
QueryData<T2> queryData = new QueryData<T2>(dao, [MASK], toStringArray(initialValues), limitPosition,
offsetPosition);
return queryData.forCurrentThread();
}
private final QueryData<T> queryData;
private CursorQuery(QueryData<T> queryData, AbstractDao<T, ?> dao, String [MASK], String[] initialValues, int limitPosition,
int offsetPosition) {
super(dao, [MASK], initialValues, limitPosition, offsetPosition);
this.queryData = queryData;
}
public CursorQuery forCurrentThread() {
return queryData.forCurrentThread(this);
}
public Cursor query() {
checkThread();
return dao.getDatabase().rawQuery([MASK], parameters);
}
@Override
public CursorQuery<T> setParameter(int index, Object parameter) {
return (CursorQuery<T>) super.setParameter(index, parameter);
}
@Override
public CursorQuery<T> setParameter(int index, Date parameter) {
return (CursorQuery<T>) super.setParameter(index, parameter);
}
@Override
public CursorQuery<T> setParameter(int index, Boolean parameter) {
return (CursorQuery<T>) super.setParameter(index, parameter);
}
} | sql | java | greenDAO |
package org.elasticsearch.xpack.core.ml.inference.assignment;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats;
import java.io.IOException;
import java.time.Instant;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
public class AssignmentStats implements ToXContentObject, Writeable {
public static class NodeStats implements ToXContentObject, Writeable {
private final DiscoveryNode node;
private final Long inferenceCount;
private final Double [MASK];
private final Double avgInferenceTimeExcludingCacheHit;
private final Instant lastAccess;
private final Integer pendingCount;
private final int errorCount;
private final Long cacheHitCount;
private final int rejectedExecutionCount;
private final int timeoutCount;
private final RoutingStateAndReason routingState;
private final Instant startTime;
private final Integer threadsPerAllocation;
private final Integer numberOfAllocations;
private final long peakThroughput;
private final long throughputLastPeriod;
private final Double avgInferenceTimeLastPeriod;
private final Long cacheHitCountLastPeriod;
public static AssignmentStats.NodeStats forStartedState(
DiscoveryNode node,
long inferenceCount,
Double [MASK],
Double avgInferenceTimeExcludingCacheHit,
int pendingCount,
int errorCount,
long cacheHitCount,
int rejectedExecutionCount,
int timeoutCount,
Instant lastAccess,
Instant startTime,
Integer threadsPerAllocation,
Integer numberOfAllocations,
long peakThroughput,
long throughputLastPeriod,
Double avgInferenceTimeLastPeriod,
long cacheHitCountLastPeriod
) {
return new AssignmentStats.NodeStats(
node,
inferenceCount,
[MASK],
avgInferenceTimeExcludingCacheHit,
lastAccess,
pendingCount,
errorCount,
cacheHitCount,
rejectedExecutionCount,
timeoutCount,
new RoutingStateAndReason(RoutingState.STARTED, null),
Objects.requireNonNull(startTime),
threadsPerAllocation,
numberOfAllocations,
peakThroughput,
throughputLastPeriod,
avgInferenceTimeLastPeriod,
cacheHitCountLastPeriod
);
}
public static AssignmentStats.NodeStats forNotStartedState(DiscoveryNode node, RoutingState state, String reason) {
return new AssignmentStats.NodeStats(
node,
null,
null,
null,
null,
null,
0,
null,
0,
0,
new RoutingStateAndReason(state, reason),
null,
null,
null,
0L,
0L,
null,
null
);
}
public NodeStats(
DiscoveryNode node,
Long inferenceCount,
Double [MASK],
Double avgInferenceTimeExcludingCacheHit,
@Nullable Instant lastAccess,
Integer pendingCount,
int errorCount,
Long cacheHitCount,
int rejectedExecutionCount,
int timeoutCount,
RoutingStateAndReason routingState,
@Nullable Instant startTime,
@Nullable Integer threadsPerAllocation,
@Nullable Integer numberOfAllocations,
long peakThroughput,
long throughputLastPeriod,
Double avgInferenceTimeLastPeriod,
Long cacheHitCountLastPeriod
) {
this.node = node;
this.inferenceCount = inferenceCount;
this.[MASK] = [MASK];
this.avgInferenceTimeExcludingCacheHit = avgInferenceTimeExcludingCacheHit;
this.lastAccess = lastAccess;
this.pendingCount = pendingCount;
this.errorCount = errorCount;
this.cacheHitCount = cacheHitCount;
this.rejectedExecutionCount = rejectedExecutionCount;
this.timeoutCount = timeoutCount;
this.routingState = routingState;
this.startTime = startTime;
this.threadsPerAllocation = threadsPerAllocation;
this.numberOfAllocations = numberOfAllocations;
this.peakThroughput = peakThroughput;
this.throughputLastPeriod = throughputLastPeriod;
this.avgInferenceTimeLastPeriod = avgInferenceTimeLastPeriod;
this.cacheHitCountLastPeriod = cacheHitCountLastPeriod;
assert this.lastAccess != null || (inferenceCount == null || inferenceCount == 0);
}
public NodeStats(StreamInput in) throws IOException {
this.node = in.readOptionalWriteable(DiscoveryNode::new);
this.inferenceCount = in.readOptionalLong();
this.[MASK] = in.readOptionalDouble();
this.lastAccess = in.readOptionalInstant();
this.pendingCount = in.readOptionalVInt();
this.routingState = in.readOptionalWriteable(RoutingStateAndReason::new);
this.startTime = in.readOptionalInstant();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
this.threadsPerAllocation = in.readOptionalVInt();
this.numberOfAllocations = in.readOptionalVInt();
this.errorCount = in.readVInt();
this.rejectedExecutionCount = in.readVInt();
this.timeoutCount = in.readVInt();
} else {
this.threadsPerAllocation = null;
this.numberOfAllocations = null;
this.errorCount = 0;
this.rejectedExecutionCount = 0;
this.timeoutCount = 0;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) {
this.peakThroughput = in.readVLong();
this.throughputLastPeriod = in.readVLong();
this.avgInferenceTimeLastPeriod = in.readOptionalDouble();
} else {
this.peakThroughput = 0;
this.throughputLastPeriod = 0;
this.avgInferenceTimeLastPeriod = null;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
this.cacheHitCount = in.readOptionalVLong();
this.cacheHitCountLastPeriod = in.readOptionalVLong();
} else {
this.cacheHitCount = null;
this.cacheHitCountLastPeriod = null;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_5_0)) {
this.avgInferenceTimeExcludingCacheHit = in.readOptionalDouble();
} else {
this.avgInferenceTimeExcludingCacheHit = null;
}
}
public DiscoveryNode getNode() {
return node;
}
public RoutingStateAndReason getRoutingState() {
return routingState;
}
public Optional<Long> getInferenceCount() {
return Optional.ofNullable(inferenceCount);
}
public Optional<Double> getAvgInferenceTime() {
return Optional.ofNullable([MASK]);
}
public Optional<Double> getAvgInferenceTimeExcludingCacheHit() {
return Optional.ofNullable(avgInferenceTimeExcludingCacheHit);
}
public Instant getLastAccess() {
return lastAccess;
}
public Integer getPendingCount() {
return pendingCount;
}
public int getErrorCount() {
return errorCount;
}
public Optional<Long> getCacheHitCount() {
return Optional.ofNullable(cacheHitCount);
}
public int getRejectedExecutionCount() {
return rejectedExecutionCount;
}
public int getTimeoutCount() {
return timeoutCount;
}
public Instant getStartTime() {
return startTime;
}
public Integer getThreadsPerAllocation() {
return threadsPerAllocation;
}
public Integer getNumberOfAllocations() {
return numberOfAllocations;
}
public long getPeakThroughput() {
return peakThroughput;
}
public long getThroughputLastPeriod() {
return throughputLastPeriod;
}
public Double getAvgInferenceTimeLastPeriod() {
return avgInferenceTimeLastPeriod;
}
public Optional<Long> getCacheHitCountLastPeriod() {
return Optional.ofNullable(cacheHitCountLastPeriod);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (node != null) {
builder.startObject("node");
node.toXContent(builder, params);
builder.endObject();
}
builder.field("routing_state", routingState);
if (inferenceCount != null) {
builder.field("inference_count", inferenceCount);
}
if (inferenceCount != null && inferenceCount > 0) {
if ([MASK] != null) {
builder.field("average_inference_time_ms", [MASK]);
}
if (avgInferenceTimeExcludingCacheHit != null) {
builder.field("average_inference_time_ms_excluding_cache_hits", avgInferenceTimeExcludingCacheHit);
}
}
if (cacheHitCount != null) {
builder.field("inference_cache_hit_count", cacheHitCount);
}
if (lastAccess != null) {
builder.timestampFieldsFromUnixEpochMillis("last_access", "last_access_string", lastAccess.toEpochMilli());
}
if (pendingCount != null) {
builder.field("number_of_pending_requests", pendingCount);
}
if (errorCount > 0) {
builder.field("error_count", errorCount);
}
if (rejectedExecutionCount > 0) {
builder.field("rejected_execution_count", rejectedExecutionCount);
}
if (timeoutCount > 0) {
builder.field("timeout_count", timeoutCount);
}
if (startTime != null) {
builder.timestampFieldsFromUnixEpochMillis("start_time", "start_time_string", startTime.toEpochMilli());
}
if (threadsPerAllocation != null) {
builder.field("threads_per_allocation", threadsPerAllocation);
}
if (numberOfAllocations != null) {
builder.field("number_of_allocations", numberOfAllocations);
}
builder.field("peak_throughput_per_minute", peakThroughput);
builder.field("throughput_last_minute", throughputLastPeriod);
if (avgInferenceTimeLastPeriod != null) {
builder.field("average_inference_time_ms_last_minute", avgInferenceTimeLastPeriod);
}
if (cacheHitCountLastPeriod != null) {
builder.field("inference_cache_hit_count_last_minute", cacheHitCountLastPeriod);
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalWriteable(node);
out.writeOptionalLong(inferenceCount);
out.writeOptionalDouble([MASK]);
out.writeOptionalInstant(lastAccess);
out.writeOptionalVInt(pendingCount);
out.writeOptionalWriteable(routingState);
out.writeOptionalInstant(startTime);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
out.writeOptionalVInt(threadsPerAllocation);
out.writeOptionalVInt(numberOfAllocations);
out.writeVInt(errorCount);
out.writeVInt(rejectedExecutionCount);
out.writeVInt(timeoutCount);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) {
out.writeVLong(peakThroughput);
out.writeVLong(throughputLastPeriod);
out.writeOptionalDouble(avgInferenceTimeLastPeriod);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
out.writeOptionalVLong(cacheHitCount);
out.writeOptionalVLong(cacheHitCountLastPeriod);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_5_0)) {
out.writeOptionalDouble(avgInferenceTimeExcludingCacheHit);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AssignmentStats.NodeStats that = (AssignmentStats.NodeStats) o;
return Objects.equals(inferenceCount, that.inferenceCount)
&& Objects.equals(that.[MASK], [MASK])
&& Objects.equals(that.avgInferenceTimeExcludingCacheHit, avgInferenceTimeExcludingCacheHit)
&& Objects.equals(node, that.node)
&& Objects.equals(lastAccess, that.lastAccess)
&& Objects.equals(pendingCount, that.pendingCount)
&& Objects.equals(errorCount, that.errorCount)
&& Objects.equals(cacheHitCount, that.cacheHitCount)
&& Objects.equals(rejectedExecutionCount, that.rejectedExecutionCount)
&& Objects.equals(timeoutCount, that.timeoutCount)
&& Objects.equals(routingState, that.routingState)
&& Objects.equals(startTime, that.startTime)
&& Objects.equals(threadsPerAllocation, that.threadsPerAllocation)
&& Objects.equals(numberOfAllocations, that.numberOfAllocations)
&& Objects.equals(peakThroughput, that.peakThroughput)
&& Objects.equals(throughputLastPeriod, that.throughputLastPeriod)
&& Objects.equals(avgInferenceTimeLastPeriod, that.avgInferenceTimeLastPeriod)
&& Objects.equals(cacheHitCountLastPeriod, that.cacheHitCountLastPeriod);
}
@Override
public int hashCode() {
return Objects.hash(
node,
inferenceCount,
[MASK],
avgInferenceTimeExcludingCacheHit,
lastAccess,
pendingCount,
errorCount,
cacheHitCount,
rejectedExecutionCount,
timeoutCount,
routingState,
startTime,
threadsPerAllocation,
numberOfAllocations,
peakThroughput,
throughputLastPeriod,
avgInferenceTimeLastPeriod,
cacheHitCountLastPeriod
);
}
}
private final String deploymentId;
private final String modelId;
private AssignmentState state;
private AllocationStatus allocationStatus;
private String reason;
@Nullable
private final Integer threadsPerAllocation;
@Nullable
private final Integer numberOfAllocations;
@Nullable
private final AdaptiveAllocationsSettings adaptiveAllocationsSettings;
@Nullable
private final Integer queueCapacity;
@Nullable
private final ByteSizeValue cacheSize;
private final Priority priority;
private final Instant startTime;
private final List<AssignmentStats.NodeStats> nodeStats;
public AssignmentStats(
String deploymentId,
String modelId,
@Nullable Integer threadsPerAllocation,
@Nullable Integer numberOfAllocations,
@Nullable AdaptiveAllocationsSettings adaptiveAllocationsSettings,
@Nullable Integer queueCapacity,
@Nullable ByteSizeValue cacheSize,
Instant startTime,
List<AssignmentStats.NodeStats> nodeStats,
Priority priority
) {
this.deploymentId = deploymentId;
this.modelId = modelId;
this.threadsPerAllocation = threadsPerAllocation;
this.numberOfAllocations = numberOfAllocations;
this.adaptiveAllocationsSettings = adaptiveAllocationsSettings;
this.queueCapacity = queueCapacity;
this.startTime = Objects.requireNonNull(startTime);
this.nodeStats = nodeStats;
this.cacheSize = cacheSize;
this.state = null;
this.reason = null;
this.priority = Objects.requireNonNull(priority);
}
public AssignmentStats(StreamInput in) throws IOException {
modelId = in.readString();
threadsPerAllocation = in.readOptionalVInt();
numberOfAllocations = in.readOptionalVInt();
queueCapacity = in.readOptionalVInt();
startTime = in.readInstant();
nodeStats = in.readCollectionAsList(AssignmentStats.NodeStats::new);
state = in.readOptionalEnum(AssignmentState.class);
reason = in.readOptionalString();
allocationStatus = in.readOptionalWriteable(AllocationStatus::new);
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
cacheSize = in.readOptionalWriteable(ByteSizeValue::readFrom);
} else {
cacheSize = null;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_6_0)) {
priority = in.readEnum(Priority.class);
} else {
priority = Priority.NORMAL;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
deploymentId = in.readString();
} else {
deploymentId = modelId;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
adaptiveAllocationsSettings = in.readOptionalWriteable(AdaptiveAllocationsSettings::new);
} else {
adaptiveAllocationsSettings = null;
}
}
public String getDeploymentId() {
return deploymentId;
}
public String getModelId() {
return modelId;
}
@Nullable
public Integer getThreadsPerAllocation() {
return threadsPerAllocation;
}
@Nullable
public Integer getNumberOfAllocations() {
return numberOfAllocations;
}
@Nullable
public AdaptiveAllocationsSettings getAdaptiveAllocationsSettings() {
return adaptiveAllocationsSettings;
}
@Nullable
public Integer getQueueCapacity() {
return queueCapacity;
}
@Nullable
public ByteSizeValue getCacheSize() {
return cacheSize;
}
public Instant getStartTime() {
return startTime;
}
public List<AssignmentStats.NodeStats> getNodeStats() {
return nodeStats;
}
public AssignmentState getState() {
return state;
}
public AssignmentStats setState(AssignmentState state) {
this.state = state;
return this;
}
public AssignmentStats setAllocationStatus(AllocationStatus allocationStatus) {
this.allocationStatus = allocationStatus;
return this;
}
public String getReason() {
return reason;
}
public AssignmentStats setReason(String reason) {
this.reason = reason;
return this;
}
public Priority getPriority() {
return priority;
}
public InferenceStats getOverallInferenceStats() {
return new InferenceStats(
0L,
nodeStats.stream().filter(n -> n.getInferenceCount().isPresent()).mapToLong(n -> n.getInferenceCount().get()).sum(),
nodeStats.stream().mapToLong(n -> n.getErrorCount() + n.getTimeoutCount() + n.getRejectedExecutionCount()).sum(),
0L,
modelId,
null,
Instant.now()
);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("deployment_id", deploymentId);
builder.field("model_id", modelId);
if (threadsPerAllocation != null) {
builder.field(StartTrainedModelDeploymentAction.TaskParams.THREADS_PER_ALLOCATION.getPreferredName(), threadsPerAllocation);
}
if (numberOfAllocations != null) {
builder.field(StartTrainedModelDeploymentAction.TaskParams.NUMBER_OF_ALLOCATIONS.getPreferredName(), numberOfAllocations);
}
if (adaptiveAllocationsSettings != null) {
builder.field(StartTrainedModelDeploymentAction.Request.ADAPTIVE_ALLOCATIONS.getPreferredName(), adaptiveAllocationsSettings);
}
if (queueCapacity != null) {
builder.field(StartTrainedModelDeploymentAction.TaskParams.QUEUE_CAPACITY.getPreferredName(), queueCapacity);
}
if (state != null) {
builder.field("state", state);
}
if (reason != null) {
builder.field("reason", reason);
}
if (allocationStatus != null) {
builder.field("allocation_status", allocationStatus);
}
if (cacheSize != null) {
builder.field("cache_size", cacheSize);
}
builder.field("priority", priority);
builder.timestampFieldsFromUnixEpochMillis("start_time", "start_time_string", startTime.toEpochMilli());
int totalErrorCount = nodeStats.stream().mapToInt(NodeStats::getErrorCount).sum();
int totalRejectedExecutionCount = nodeStats.stream().mapToInt(NodeStats::getRejectedExecutionCount).sum();
int totalTimeoutCount = nodeStats.stream().mapToInt(NodeStats::getTimeoutCount).sum();
long totalInferenceCount = nodeStats.stream()
.filter(n -> n.getInferenceCount().isPresent())
.mapToLong(n -> n.getInferenceCount().get())
.sum();
long peakThroughput = nodeStats.stream().mapToLong(NodeStats::getPeakThroughput).sum();
if (totalErrorCount > 0) {
builder.field("error_count", totalErrorCount);
}
if (totalRejectedExecutionCount > 0) {
builder.field("rejected_execution_count", totalRejectedExecutionCount);
}
if (totalTimeoutCount > 0) {
builder.field("timeout_count", totalTimeoutCount);
}
if (totalInferenceCount > 0) {
builder.field("inference_count", totalInferenceCount);
}
builder.field("peak_throughput_per_minute", peakThroughput);
builder.startArray("nodes");
for (AssignmentStats.NodeStats nodeStat : nodeStats) {
nodeStat.toXContent(builder, params);
}
builder.endArray();
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(modelId);
out.writeOptionalVInt(threadsPerAllocation);
out.writeOptionalVInt(numberOfAllocations);
out.writeOptionalVInt(queueCapacity);
out.writeInstant(startTime);
out.writeCollection(nodeStats);
if (AssignmentState.FAILED.equals(state) && out.getTransportVersion().before(TransportVersions.V_8_4_0)) {
out.writeOptionalEnum(AssignmentState.STARTING);
} else {
out.writeOptionalEnum(state);
}
out.writeOptionalString(reason);
out.writeOptionalWriteable(allocationStatus);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
out.writeOptionalWriteable(cacheSize);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_6_0)) {
out.writeEnum(priority);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
out.writeString(deploymentId);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
out.writeOptionalWriteable(adaptiveAllocationsSettings);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AssignmentStats that = (AssignmentStats) o;
return Objects.equals(deploymentId, that.deploymentId)
&& Objects.equals(modelId, that.modelId)
&& Objects.equals(threadsPerAllocation, that.threadsPerAllocation)
&& Objects.equals(numberOfAllocations, that.numberOfAllocations)
&& Objects.equals(adaptiveAllocationsSettings, that.adaptiveAllocationsSettings)
&& Objects.equals(queueCapacity, that.queueCapacity)
&& Objects.equals(startTime, that.startTime)
&& Objects.equals(state, that.state)
&& Objects.equals(reason, that.reason)
&& Objects.equals(allocationStatus, that.allocationStatus)
&& Objects.equals(cacheSize, that.cacheSize)
&& Objects.equals(nodeStats, that.nodeStats)
&& priority == that.priority;
}
@Override
public int hashCode() {
return Objects.hash(
deploymentId,
modelId,
threadsPerAllocation,
numberOfAllocations,
adaptiveAllocationsSettings,
queueCapacity,
startTime,
nodeStats,
state,
reason,
allocationStatus,
cacheSize,
priority
);
}
@Override
public String toString() {
return Strings.toString(this);
}
} | avgInferenceTime | java | elasticsearch |
package antlr;
@Deprecated
public class ANTLRException extends IllegalArgumentException {
public ANTLRException(String [MASK]) {
super([MASK]);
}
public ANTLRException(String [MASK], Throwable cause) {
super([MASK], cause);
}
public ANTLRException(Throwable cause) {
super(cause);
}
} | message | java | jenkins |
package com.google.refine.importers.tree;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.importers.ImporterUtilities;
import com.google.refine.model.Cell;
import com.google.refine.model.Column;
import com.google.refine.model.Project;
public abstract class TreeImportUtilities {
final static Logger logger = LoggerFactory.getLogger("TreeImportUtilities");
static protected void sortRecordElementCandidates(List<RecordElementCandidate> list) {
Collections.sort(list, new Comparator<RecordElementCandidate>() {
@Override
public int compare(RecordElementCandidate o1, RecordElementCandidate o2) {
return o2.count - o1.count;
}
});
}
static public void createColumnsFromImport(
Project project,
ImportColumnGroup columnGroup) {
int startColumnIndex = project.columnModel.columns.size();
List<ImportColumn> columns = new ArrayList<ImportColumn>(columnGroup.columns.values());
Collections.sort(columns, new Comparator<ImportColumn>() {
@Override
public int compare(ImportColumn o1, ImportColumn o2) {
if (o1.blankOnFirstRow != o2.blankOnFirstRow) {
return o1.blankOnFirstRow ? 1 : -1;
}
return o2.nonBlankCount - o1.nonBlankCount;
}
});
for (int i = 0; i < columns.size(); i++) {
ImportColumn c = columns.get(i);
Column column = new com.google.refine.model.Column(c.cellIndex, c.name);
project.columnModel.columns.add(column);
}
List<ImportColumnGroup> subgroups = new ArrayList<ImportColumnGroup>(columnGroup.subgroups.values());
Collections.sort(subgroups, new Comparator<ImportColumnGroup>() {
@Override
public int compare(ImportColumnGroup o1, ImportColumnGroup o2) {
return o2.nonBlankCount - o1.nonBlankCount;
}
});
for (ImportColumnGroup g : subgroups) {
createColumnsFromImport(project, g);
}
int endColumnIndex = project.columnModel.columns.size();
int span = endColumnIndex - startColumnIndex;
if (span > 1 && span < project.columnModel.columns.size()) {
project.columnModel.addColumnGroup(startColumnIndex, span, startColumnIndex);
}
}
@Deprecated
static protected void addCell(
Project project,
ImportColumnGroup columnGroup,
ImportRecord record,
String columnLocalName,
String text) {
addCell(project, columnGroup, record, columnLocalName, text, true, true);
}
static protected void addCell(
Project project,
ImportColumnGroup columnGroup,
ImportRecord record,
String columnLocalName,
String text,
boolean [MASK],
boolean guessDataType) {
Serializable value = text;
if (![MASK] && (text == null || (text).isEmpty())) {
return;
}
if (guessDataType) {
value = ImporterUtilities.parseCellValue(text);
}
addCell(project, columnGroup, record, columnLocalName, value);
}
protected static void addCell(Project project, ImportColumnGroup columnGroup, ImportRecord record,
String columnLocalName, Serializable value) {
ImportColumn column = getColumn(project, columnGroup, columnLocalName);
int cellIndex = column.cellIndex;
int rowIndex = Math.max(columnGroup.nextRowIndex, column.nextRowIndex);
List<Cell> row = record.rows.get(rowIndex);
if (row == null) {
row = new ArrayList<Cell>();
record.rows.set(rowIndex, row);
}
while (cellIndex >= row.size()) {
row.add(null);
}
row.set(cellIndex, new Cell(value, null));
column.nextRowIndex = rowIndex + 1;
column.nonBlankCount++;
}
static protected ImportColumn getColumn(
Project project,
ImportColumnGroup columnGroup,
String localName) {
if (columnGroup.columns.containsKey(localName)) {
return columnGroup.columns.get(localName);
}
ImportColumn column = createColumn(project, columnGroup, localName);
columnGroup.columns.put(localName, column);
return column;
}
static protected ImportColumn createColumn(
Project project,
ImportColumnGroup columnGroup,
String localName) {
ImportColumn newColumn = new ImportColumn();
newColumn.name = columnGroup.name.length() == 0 ? (localName == null ? "Text" : localName)
: (localName == null ? columnGroup.name : (columnGroup.name + " - " + localName));
newColumn.cellIndex = project.columnModel.allocateNewCellIndex();
newColumn.nextRowIndex = columnGroup.nextRowIndex;
return newColumn;
}
static protected ImportColumnGroup getColumnGroup(
Project project,
ImportColumnGroup columnGroup,
String localName) {
if (columnGroup.subgroups.containsKey(localName)) {
return columnGroup.subgroups.get(localName);
}
ImportColumnGroup subgroup = createColumnGroup(project, columnGroup, localName);
columnGroup.subgroups.put(localName, subgroup);
return subgroup;
}
static protected ImportColumnGroup createColumnGroup(
Project project,
ImportColumnGroup columnGroup,
String localName) {
ImportColumnGroup newGroup = new ImportColumnGroup();
newGroup.name = columnGroup.name.length() == 0 ? (localName == null ? "Text" : localName)
: (localName == null ? columnGroup.name : (columnGroup.name + " - " + localName));
newGroup.nextRowIndex = columnGroup.nextRowIndex;
return newGroup;
}
} | storeEmptyString | java | OpenRefine |
package dagger.internal;
import java.lang.reflect.AccessibleObject;
public abstract class Loader {
private final Memoizer<ClassLoader, Memoizer<String, Class<?>>> caches =
new Memoizer<ClassLoader, Memoizer<String, Class<?>>>() {
@Override protected Memoizer<String, Class<?>> create(final ClassLoader classLoader) {
return new Memoizer<String, Class<?>>() {
@Override protected Class<?> create(String className) {
try {
return classLoader.loadClass(className);
} catch (ClassNotFoundException e) {
return Void.class;
}
}
};
}
};
public abstract Binding<?> getAtInjectBinding(
String key, String className, ClassLoader classLoader, boolean mustHaveInjections);
public abstract <T> ModuleAdapter<T> getModuleAdapter(Class<T> moduleClass);
public abstract StaticInjection getStaticInjection(Class<?> [MASK]);
protected Class<?> loadClass(ClassLoader classLoader, String name) {
classLoader = (classLoader != null) ? classLoader : ClassLoader.getSystemClassLoader();
return caches.get(classLoader).get(name);
}
protected <T> T instantiate(String name, ClassLoader classLoader) {
try {
Class<?> generatedClass = loadClass(classLoader, name);
if (generatedClass == Void.class) {
return null;
}
@SuppressWarnings("unchecked")
T instance = (T) generatedClass.newInstance();
return instance;
} catch (InstantiationException e) {
throw new RuntimeException("Failed to initialize " + name, e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Failed to initialize " + name, e);
}
}
} | injectedClass | java | dagger |
package jenkins.security;
import static org.springframework.security.core.context.SecurityContextHolder.getContext;
import static org.springframework.security.core.context.SecurityContextHolder.setContext;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import jenkins.util.InterceptingExecutorService;
import org.springframework.security.core.context.SecurityContext;
public class SecurityContextExecutorService extends InterceptingExecutorService {
public SecurityContextExecutorService(ExecutorService service) {
super(service);
}
@Override
protected Runnable wrap(final Runnable r) {
final SecurityContext callingContext = getContext();
return new Runnable() {
@Override
public void run() {
SecurityContext [MASK] = getContext();
setContext(callingContext);
try {
r.run();
} finally {
setContext([MASK]);
}
}
};
}
@Override
protected <V> Callable<V> wrap(final Callable<V> c) {
final SecurityContext callingContext = getContext();
return new Callable<>() {
@Override
public V call() throws Exception {
SecurityContext [MASK] = getContext();
setContext(callingContext);
try {
return c.call();
} finally {
setContext([MASK]);
}
}
};
}
} | old | java | jenkins |
package com.google.devtools.common.options;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.escape.Escaper;
import com.google.common.html.HtmlEscapers;
import com.google.devtools.common.options.OptionsParser.HelpVerbosity;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public final class OptionsUsageTest {
private OptionsData data;
private static final Escaper HTML_ESCAPER = HtmlEscapers.htmlEscaper();
@Before
public void setUp() {
data = OptionsParser.getOptionsDataInternal(TestOptions.class);
}
private String getHtmlUsageWithoutTags(String [MASK]) {
StringBuilder builder = new StringBuilder();
OptionsUsage.getUsageHtml(
data.getOptionDefinitionFromName([MASK]), builder, HTML_ESCAPER, data, false, null);
return builder.toString();
}
private String getHtmlUsageWithTags(String [MASK]) {
StringBuilder builder = new StringBuilder();
OptionsUsage.getUsageHtml(
data.getOptionDefinitionFromName([MASK]), builder, HTML_ESCAPER, data, true, null);
return builder.toString();
}
private String getHtmlUsageWithCommandName(String [MASK], String commandName) {
StringBuilder builder = new StringBuilder();
OptionsUsage.getUsageHtml(
data.getOptionDefinitionFromName([MASK]),
builder,
HTML_ESCAPER,
data,
false,
commandName);
return builder.toString();
}
private String getTerminalUsageWithoutTags(String [MASK], HelpVerbosity verbosity) {
StringBuilder builder = new StringBuilder();
OptionsUsage.getUsage(
data.getOptionDefinitionFromName([MASK]), builder, verbosity, data, false);
return builder.toString();
}
private String getTerminalUsageWithTags(String [MASK], HelpVerbosity verbosity) {
StringBuilder builder = new StringBuilder();
OptionsUsage.getUsage(
data.getOptionDefinitionFromName([MASK]), builder, verbosity, data, true);
return builder.toString();
}
@Test
public void commandNameAnchorId_htmlOutput() {
assertThat(getHtmlUsageWithCommandName("test_string", "command_name"))
.isEqualTo(
"<dt id=\"command_name-flag--test_string\"><code id=\"test_string\"><a"
+ " href=\"#command_name-flag--test_string\">--test_string</a>=<a"
+ " string></code> default: \"test string default\"</dt>\n"
+ "<dd>\n"
+ "a string-valued option to test simple option operations\n"
+ "</dd>\n");
}
@Test
public void stringValue_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_string", HelpVerbosity.SHORT))
.isEqualTo(" --test_string\n");
assertThat(getTerminalUsageWithoutTags("test_string", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("test_string", HelpVerbosity.SHORT));
}
@Test
public void stringValue_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_string", HelpVerbosity.MEDIUM))
.isEqualTo(" --test_string (a string; default: \"test string default\")\n");
assertThat(getTerminalUsageWithoutTags("test_string", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("test_string", HelpVerbosity.MEDIUM));
}
@Test
public void stringValue_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_string", HelpVerbosity.LONG))
.isEqualTo(
" --test_string (a string; default: \"test string default\")\n"
+ " a string-valued option to test simple option operations\n");
assertThat(getTerminalUsageWithTags("test_string", HelpVerbosity.LONG))
.isEqualTo(
" --test_string (a string; default: \"test string default\")\n"
+ " a string-valued option to test simple option operations\n"
+ " Tags: no_op\n");
}
@Test
public void stringValue_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("test_string"))
.isEqualTo(
"<dt id=\"flag--test_string\"><code><a href=\"#flag--test_string\">--test_string</a>"
+ "=<a string></code> default: \"test string default\"</dt>\n"
+ "<dd>\n"
+ "a string-valued option to test simple option operations\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("test_string"))
.isEqualTo(
"<dt id=\"flag--test_string\"><code><a href=\"#flag--test_string\">--test_string</a>"
+ "=<a string></code> default: \"test string default\"</dt>\n"
+ "<dd>\n"
+ "a string-valued option to test simple option operations\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a>"
+ "</dd>\n");
}
@Test
public void intValue_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("expanded_c", HelpVerbosity.SHORT))
.isEqualTo(" --expanded_c\n");
assertThat(getTerminalUsageWithoutTags("expanded_c", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("expanded_c", HelpVerbosity.SHORT));
}
@Test
public void intValue_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("expanded_c", HelpVerbosity.MEDIUM))
.isEqualTo(" --expanded_c (an integer; default: \"12\")\n");
assertThat(getTerminalUsageWithoutTags("expanded_c", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("expanded_c", HelpVerbosity.MEDIUM));
}
@Test
public void intValue_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("expanded_c", HelpVerbosity.LONG))
.isEqualTo(
" --expanded_c (an integer; default: \"12\")\n"
+ " an int-value'd flag used to test expansion logic\n");
assertThat(getTerminalUsageWithTags("expanded_c", HelpVerbosity.LONG))
.isEqualTo(
" --expanded_c (an integer; default: \"12\")\n"
+ " an int-value'd flag used to test expansion logic\n"
+ " Tags: no_op\n");
}
@Test
public void intValue_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("expanded_c"))
.isEqualTo(
"<dt id=\"flag--expanded_c\"><code>"
+ "<a href=\"#flag--expanded_c\">--expanded_c</a>"
+ "=<an integer></code> default: \"12\"</dt>\n"
+ "<dd>\n"
+ "an int-value'd flag used to test expansion logic\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("expanded_c"))
.isEqualTo(
"<dt id=\"flag--expanded_c\"><code>"
+ "<a href=\"#flag--expanded_c\">--expanded_c</a>"
+ "=<an integer></code> default: \"12\"</dt>\n"
+ "<dd>\n"
+ "an int-value'd flag used to test expansion logic\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a>"
+ "</dd>\n");
}
@Test
public void booleanValue_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("expanded_a", HelpVerbosity.SHORT))
.isEqualTo(" --[no]expanded_a\n");
assertThat(getTerminalUsageWithoutTags("expanded_a", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("expanded_a", HelpVerbosity.SHORT));
}
@Test
public void booleanValue_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("expanded_a", HelpVerbosity.MEDIUM))
.isEqualTo(" --[no]expanded_a (a boolean; default: \"true\")\n");
assertThat(getTerminalUsageWithoutTags("expanded_a", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("expanded_a", HelpVerbosity.MEDIUM));
}
@Test
public void booleanValue_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("expanded_a", HelpVerbosity.LONG))
.isEqualTo(
" --[no]expanded_a (a boolean; default: \"true\")\n"
+ " A boolean flag with unknown effect to test tagless usage text.\n");
assertThat(getTerminalUsageWithoutTags("expanded_a", HelpVerbosity.LONG))
.isEqualTo(getTerminalUsageWithTags("expanded_a", HelpVerbosity.LONG));
}
@Test
public void booleanValue_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("expanded_a"))
.isEqualTo(
"<dt id=\"flag--expanded_a\"><code><a href=\"#flag--expanded_a\">"
+ "--[no]expanded_a</a></code> default: \"true\"</dt>\n"
+ "<dd>\n"
+ "A boolean flag with unknown effect to test tagless usage text.\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithoutTags("expanded_a")).isEqualTo(getHtmlUsageWithTags("expanded_a"));
}
@Test
public void multipleValue_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_multiple_string", HelpVerbosity.SHORT))
.isEqualTo(" --test_multiple_string\n");
assertThat(getTerminalUsageWithoutTags("test_multiple_string", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("test_multiple_string", HelpVerbosity.SHORT));
}
@Test
public void multipleValue_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_multiple_string", HelpVerbosity.MEDIUM))
.isEqualTo(" --test_multiple_string (a string; may be used multiple times)\n");
assertThat(getTerminalUsageWithoutTags("test_multiple_string", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("test_multiple_string", HelpVerbosity.MEDIUM));
}
@Test
public void multipleValue_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_multiple_string", HelpVerbosity.LONG))
.isEqualTo(
" --test_multiple_string (a string; may be used multiple times)\n"
+ " a repeatable string-valued flag with its own unhelpful help text\n");
assertThat(getTerminalUsageWithTags("test_multiple_string", HelpVerbosity.LONG))
.isEqualTo(
" --test_multiple_string (a string; may be used multiple times)\n"
+ " a repeatable string-valued flag with its own unhelpful help text\n"
+ " Tags: no_op\n");
}
@Test
public void multipleValue_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("test_multiple_string"))
.isEqualTo(
"<dt id=\"flag--test_multiple_string\"><code>"
+ "<a href=\"#flag--test_multiple_string\">--test_multiple_string</a>"
+ "=<a string></code> "
+ "multiple uses are accumulated</dt>\n"
+ "<dd>\n"
+ "a repeatable string-valued flag with its own unhelpful help text\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("test_multiple_string"))
.isEqualTo(
"<dt id=\"flag--test_multiple_string\"><code>"
+ "<a href=\"#flag--test_multiple_string\">--test_multiple_string</a>"
+ "=<a string></code> "
+ "multiple uses are accumulated</dt>\n"
+ "<dd>\n"
+ "a repeatable string-valued flag with its own unhelpful help text\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a>"
+ "</dd>\n");
}
@Test
public void customConverterValue_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_list_converters", HelpVerbosity.SHORT))
.isEqualTo(" --test_list_converters\n");
assertThat(getTerminalUsageWithoutTags("test_list_converters", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("test_list_converters", HelpVerbosity.SHORT));
}
@Test
public void customConverterValue_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_list_converters", HelpVerbosity.MEDIUM))
.isEqualTo(" --test_list_converters (a list of strings; may be used multiple times)\n");
assertThat(getTerminalUsageWithoutTags("test_list_converters", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("test_list_converters", HelpVerbosity.MEDIUM));
}
@Test
public void customConverterValue_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_list_converters", HelpVerbosity.LONG))
.isEqualTo(
" --test_list_converters (a list of strings; may be used multiple times)\n"
+ " a repeatable flag that accepts lists, but doesn't want to have lists of \n"
+ " lists as a final type\n");
assertThat(getTerminalUsageWithTags("test_list_converters", HelpVerbosity.LONG))
.isEqualTo(
" --test_list_converters (a list of strings; may be used multiple times)\n"
+ " a repeatable flag that accepts lists, but doesn't want to have lists of \n"
+ " lists as a final type\n"
+ " Tags: no_op\n");
}
@Test
public void customConverterValue_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("test_list_converters"))
.isEqualTo(
"<dt id=\"flag--test_list_converters\"><code>"
+ "<a href=\"#flag--test_list_converters\">--test_list_converters</a>"
+ "=<a list of strings></code> "
+ "multiple uses are accumulated</dt>\n"
+ "<dd>\n"
+ "a repeatable flag that accepts lists, but doesn't want to have lists of "
+ "lists as a final type\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("test_list_converters"))
.isEqualTo(
"<dt id=\"flag--test_list_converters\"><code>"
+ "<a href=\"#flag--test_list_converters\">--test_list_converters</a>"
+ "=<a list of strings></code> "
+ "multiple uses are accumulated</dt>\n"
+ "<dd>\n"
+ "a repeatable flag that accepts lists, but doesn't want to have lists of "
+ "lists as a final type\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a>"
+ "</dd>\n");
}
@Test
public void staticExpansionOption_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_expansion", HelpVerbosity.SHORT))
.isEqualTo(" --test_expansion\n");
assertThat(getTerminalUsageWithoutTags("test_expansion", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("test_expansion", HelpVerbosity.SHORT));
}
@Test
public void staticExpansionOption_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_expansion", HelpVerbosity.MEDIUM))
.isEqualTo(" --test_expansion\n");
assertThat(getTerminalUsageWithoutTags("test_expansion", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("test_expansion", HelpVerbosity.MEDIUM));
}
@Test
public void staticExpansionOption_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_expansion", HelpVerbosity.LONG))
.isEqualTo(
" --test_expansion\n"
+ " this expands to an alphabet soup.\n"
+ " Expands to: --noexpanded_a --expanded_b=false --expanded_c 42 --\n"
+ " expanded_d bar \n");
assertThat(getTerminalUsageWithTags("test_expansion", HelpVerbosity.LONG))
.isEqualTo(
" --test_expansion\n"
+ " this expands to an alphabet soup.\n"
+ " Expands to: --noexpanded_a --expanded_b=false --expanded_c 42 --\n"
+ " expanded_d bar \n"
+ " Tags: no_op\n");
}
@Test
public void staticExpansionOption_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("test_expansion"))
.isEqualTo(
"<dt id=\"flag--test_expansion\"><code><a href=\"#flag--test_expansion\">"
+ "--test_expansion</a></code></dt>\n"
+ "<dd>\n"
+ "this expands to an alphabet soup.\n"
+ "<br/>\n"
+ "Expands to:<br/>\n"
+ " <code>"
+ "<a href=\"#flag--noexpanded_a\">--noexpanded_a</a></code><br/>\n"
+ " <code>"
+ "<a href=\"#flag--expanded_b\">--expanded_b=false</a></code><br/>\n"
+ " <code><a href=\"#flag--expanded_c\">--expanded_c</a></code><br/>\n"
+ " <code><a href=\"#flag42\">42</a></code><br/>\n"
+ " <code><a href=\"#flag--expanded_d\">--expanded_d</a></code><br/>\n"
+ " <code><a href=\"#flagbar\">bar</a></code><br/>\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("test_expansion"))
.isEqualTo(
"<dt id=\"flag--test_expansion\"><code><a href=\"#flag--test_expansion\">"
+ "--test_expansion</a></code></dt>\n"
+ "<dd>\n"
+ "this expands to an alphabet soup.\n"
+ "<br/>\n"
+ "Expands to:<br/>\n"
+ " <code>"
+ "<a href=\"#flag--noexpanded_a\">--noexpanded_a</a></code><br/>\n"
+ " <code>"
+ "<a href=\"#flag--expanded_b\">--expanded_b=false</a></code><br/>\n"
+ " <code><a href=\"#flag--expanded_c\">--expanded_c</a></code><br/>\n"
+ " <code><a href=\"#flag42\">42</a></code><br/>\n"
+ " <code><a href=\"#flag--expanded_d\">--expanded_d</a></code><br/>\n"
+ " <code><a href=\"#flagbar\">bar</a></code><br/>\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a>"
+ "</dd>\n");
}
@Test
public void recursiveExpansionOption_shortTerminalOutput() {
assertThat(
getTerminalUsageWithoutTags("test_recursive_expansion_top_level", HelpVerbosity.SHORT))
.isEqualTo(" --test_recursive_expansion_top_level\n");
assertThat(
getTerminalUsageWithoutTags("test_recursive_expansion_top_level", HelpVerbosity.SHORT))
.isEqualTo(
getTerminalUsageWithTags("test_recursive_expansion_top_level", HelpVerbosity.SHORT));
}
@Test
public void recursiveExpansionOption_mediumTerminalOutput() {
assertThat(
getTerminalUsageWithoutTags("test_recursive_expansion_top_level", HelpVerbosity.MEDIUM))
.isEqualTo(" --test_recursive_expansion_top_level\n");
assertThat(
getTerminalUsageWithoutTags("test_recursive_expansion_top_level", HelpVerbosity.MEDIUM))
.isEqualTo(
getTerminalUsageWithTags("test_recursive_expansion_top_level", HelpVerbosity.MEDIUM));
}
@Test
public void recursiveExpansionOption_longTerminalOutput() {
assertThat(
getTerminalUsageWithoutTags("test_recursive_expansion_top_level", HelpVerbosity.LONG))
.isEqualTo(
" --test_recursive_expansion_top_level\n"
+ " Lets the children do all the work.\n"
+ " Expands to: --test_recursive_expansion_middle1 --\n"
+ " test_recursive_expansion_middle2 \n");
assertThat(getTerminalUsageWithTags("test_recursive_expansion_top_level", HelpVerbosity.LONG))
.isEqualTo(
" --test_recursive_expansion_top_level\n"
+ " Lets the children do all the work.\n"
+ " Expands to: --test_recursive_expansion_middle1 --\n"
+ " test_recursive_expansion_middle2 \n"
+ " Tags: no_op\n");
}
@Test
public void recursiveExpansionOption_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("test_recursive_expansion_top_level"))
.isEqualTo(
"<dt id=\"flag--test_recursive_expansion_top_level\"><code><a"
+ " href=\"#flag--test_recursive_expansion_top_level\">--test_recursive_expansion_top_level</a></code></dt>\n"
+ "<dd>\n"
+ "Lets the children do all the work.\n"
+ "<br/>\n"
+ "Expands to:<br/>\n"
+ " <code><a"
+ " href=\"#flag--test_recursive_expansion_middle1\">--test_recursive_expansion_middle1</a></code><br/>\n"
+ " <code><a"
+ " href=\"#flag--test_recursive_expansion_middle2\">--test_recursive_expansion_middle2</a></code><br/>\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("test_recursive_expansion_top_level"))
.isEqualTo(
"<dt id=\"flag--test_recursive_expansion_top_level\"><code><a"
+ " href=\"#flag--test_recursive_expansion_top_level\">--test_recursive_expansion_top_level</a></code></dt>\n"
+ "<dd>\n"
+ "Lets the children do all the work.\n"
+ "<br/>\n"
+ "Expands to:<br/>\n"
+ " <code><a"
+ " href=\"#flag--test_recursive_expansion_middle1\">--test_recursive_expansion_middle1</a></code><br/>\n"
+ " <code><a"
+ " href=\"#flag--test_recursive_expansion_middle2\">--test_recursive_expansion_middle2</a></code><br/>\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a></dd>\n");
}
@Test
public void expansionToMultipleValue_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_expansion_to_repeatable", HelpVerbosity.SHORT))
.isEqualTo(" --test_expansion_to_repeatable\n");
assertThat(getTerminalUsageWithoutTags("test_expansion_to_repeatable", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("test_expansion_to_repeatable", HelpVerbosity.SHORT));
}
@Test
public void expansionToMultipleValue_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_expansion_to_repeatable", HelpVerbosity.MEDIUM))
.isEqualTo(" --test_expansion_to_repeatable\n");
assertThat(getTerminalUsageWithoutTags("test_expansion_to_repeatable", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("test_expansion_to_repeatable", HelpVerbosity.MEDIUM));
}
@Test
public void expansionToMultipleValue_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_expansion_to_repeatable", HelpVerbosity.LONG))
.isEqualTo(
" --test_expansion_to_repeatable\n"
+ " Go forth and multiply, they said.\n"
+ " Expands to: --test_multiple_string=expandedFirstValue --\n"
+ " test_multiple_string=expandedSecondValue \n");
assertThat(getTerminalUsageWithTags("test_expansion_to_repeatable", HelpVerbosity.LONG))
.isEqualTo(
" --test_expansion_to_repeatable\n"
+ " Go forth and multiply, they said.\n"
+ " Expands to: --test_multiple_string=expandedFirstValue --\n"
+ " test_multiple_string=expandedSecondValue \n"
+ " Tags: no_op\n");
}
@Test
public void expansionToMultipleValue_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("test_expansion_to_repeatable"))
.isEqualTo(
"<dt id=\"flag--test_expansion_to_repeatable\"><code><a"
+ " href=\"#flag--test_expansion_to_repeatable\">--test_expansion_to_repeatable</a></code></dt>\n"
+ "<dd>\n"
+ "Go forth and multiply, they said.\n"
+ "<br/>\n"
+ "Expands to:<br/>\n"
+ " <code><a"
+ " href=\"#flag--test_multiple_string\">--test_multiple_string=expandedFirstValue</a></code><br/>\n"
+ " <code><a"
+ " href=\"#flag--test_multiple_string\">--test_multiple_string=expandedSecondValue</a></code><br/>\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("test_expansion_to_repeatable"))
.isEqualTo(
"<dt id=\"flag--test_expansion_to_repeatable\"><code><a"
+ " href=\"#flag--test_expansion_to_repeatable\">--test_expansion_to_repeatable</a></code></dt>\n"
+ "<dd>\n"
+ "Go forth and multiply, they said.\n"
+ "<br/>\n"
+ "Expands to:<br/>\n"
+ " <code><a"
+ " href=\"#flag--test_multiple_string\">--test_multiple_string=expandedFirstValue</a></code><br/>\n"
+ " <code><a"
+ " href=\"#flag--test_multiple_string\">--test_multiple_string=expandedSecondValue</a></code><br/>\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a></dd>\n");
}
@Test
public void implicitRequirementOption_shortTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_implicit_requirement", HelpVerbosity.SHORT))
.isEqualTo(" --test_implicit_requirement\n");
assertThat(getTerminalUsageWithoutTags("test_implicit_requirement", HelpVerbosity.SHORT))
.isEqualTo(getTerminalUsageWithTags("test_implicit_requirement", HelpVerbosity.SHORT));
}
@Test
public void implicitRequirementOption_mediumTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_implicit_requirement", HelpVerbosity.MEDIUM))
.isEqualTo(" --test_implicit_requirement (a string; default: \"direct implicit\")\n");
assertThat(getTerminalUsageWithoutTags("test_implicit_requirement", HelpVerbosity.MEDIUM))
.isEqualTo(getTerminalUsageWithTags("test_implicit_requirement", HelpVerbosity.MEDIUM));
}
@Test
public void implicitRequirementOption_longTerminalOutput() {
assertThat(getTerminalUsageWithoutTags("test_implicit_requirement", HelpVerbosity.LONG))
.isEqualTo(
" --test_implicit_requirement (a string; default: \"direct implicit\")\n"
+ " this option really needs that other one, isolation of purpose has failed.\n"
+ " Using this option will also add: --implicit_requirement_a=implicit \n"
+ " requirement, required \n");
assertThat(getTerminalUsageWithTags("test_implicit_requirement", HelpVerbosity.LONG))
.isEqualTo(
" --test_implicit_requirement (a string; default: \"direct implicit\")\n"
+ " this option really needs that other one, isolation of purpose has failed.\n"
+ " Using this option will also add: --implicit_requirement_a=implicit \n"
+ " requirement, required \n"
+ " Tags: no_op\n");
}
@Test
public void implicitRequirementOption_htmlOutput() {
assertThat(getHtmlUsageWithoutTags("test_implicit_requirement"))
.isEqualTo(
"<dt id=\"flag--test_implicit_requirement\"><code>"
+ "<a href=\"#flag--test_implicit_requirement\">--test_implicit_requirement</a>"
+ "=<a string></code> "
+ "default: \"direct implicit\"</dt>\n"
+ "<dd>\n"
+ "this option really needs that other one, isolation of purpose has failed.\n"
+ "</dd>\n");
assertThat(getHtmlUsageWithTags("test_implicit_requirement"))
.isEqualTo(
"<dt id=\"flag--test_implicit_requirement\"><code>"
+ "<a href=\"#flag--test_implicit_requirement\">--test_implicit_requirement</a>"
+ "=<a string></code> "
+ "default: \"direct implicit\"</dt>\n"
+ "<dd>\n"
+ "this option really needs that other one, isolation of purpose has failed.\n"
+ "<br>Tags:\n"
+ "<a href=\"#effect_tag_NO_OP\"><code>no_op</code></a>"
+ "</dd>\n");
}
} | fieldName | java | bazel |
package org.springframework.cache.interceptor;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.jspecify.annotations.Nullable;
import org.springframework.core.KotlinDetector;
public class SimpleKeyGenerator implements KeyGenerator {
@Override
public Object generate(Object [MASK], Method method, @Nullable Object... params) {
return generateKey((KotlinDetector.isSuspendingFunction(method) ?
Arrays.copyOf(params, params.length - 1) : params));
}
public static Object generateKey(@Nullable Object... params) {
if (params.length == 0) {
return SimpleKey.EMPTY;
}
if (params.length == 1) {
Object param = params[0];
if (param != null && !param.getClass().isArray()) {
return param;
}
}
return new SimpleKey(params);
}
} | target | java | spring-framework |
package org.elasticsearch.entitlement.bridge;
import java.util.Optional;
import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE;
public class Util {
public static final Class<?> NO_CLASS = new Object() {
}.getClass();
@SuppressWarnings("unused")
public static Class<?> getCallerClass() {
Optional<Class<?>> [MASK] = StackWalker.getInstance(RETAIN_CLASS_REFERENCE)
.walk(
frames -> frames.skip(2)
.findFirst()
.map(StackWalker.StackFrame::getDeclaringClass)
);
return [MASK].orElse(NO_CLASS);
}
} | callerClassIfAny | java | elasticsearch |
package org.springframework.web.accept;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
public class SemanticApiVersionParserTests {
private final SemanticApiVersionParser parser = new SemanticApiVersionParser();
@Test
void parse() {
testParse("0", 0, 0, 0);
testParse("0.3", 0, 3, 0);
testParse("4.5", 4, 5, 0);
testParse("6.7.8", 6, 7, 8);
testParse("v01", 1, 0, 0);
testParse("version-1.2", 1, 2, 0);
}
private void testParse(String input, int major, int [MASK], int patch) {
SemanticApiVersionParser.Version actual = this.parser.parseVersion(input);
assertThat(actual.getMajor()).isEqualTo(major);
assertThat(actual.getMinor()).isEqualTo([MASK]);
assertThat(actual.getPatch()).isEqualTo(patch);
}
@ParameterizedTest
@ValueSource(strings = {"", "v", "1a", "1.0a", "1.0.0a", "1.0.0.", "1.0.0-"})
void parseInvalid(String input) {
testParseInvalid(input);
}
private void testParseInvalid(String input) {
assertThatIllegalStateException().isThrownBy(() -> this.parser.parseVersion(input))
.withMessage("Invalid API version format");
}
} | minor | java | spring-framework |
package com.google.devtools.build.lib.server;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.testutil.TestUtils.WAIT_TIMEOUT_SECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertThrows;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.clock.JavaClock;
import com.google.devtools.build.lib.runtime.BlazeCommandResult;
import com.google.devtools.build.lib.runtime.CommandDispatcher;
import com.google.devtools.build.lib.runtime.proto.InvocationPolicyOuterClass.InvocationPolicy;
import com.google.devtools.build.lib.server.CommandProtos.CancelRequest;
import com.google.devtools.build.lib.server.CommandProtos.CancelResponse;
import com.google.devtools.build.lib.server.CommandProtos.EnvironmentVariable;
import com.google.devtools.build.lib.server.CommandProtos.RunRequest;
import com.google.devtools.build.lib.server.CommandProtos.RunResponse;
import com.google.devtools.build.lib.server.CommandServerGrpc.CommandServerStub;
import com.google.devtools.build.lib.server.FailureDetails.Command;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.server.FailureDetails.GrpcServer;
import com.google.devtools.build.lib.server.FailureDetails.Interrupted;
import com.google.devtools.build.lib.server.FailureDetails.Interrupted.Code;
import com.google.devtools.build.lib.server.GrpcServerImpl.BlockingStreamObserver;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.util.io.CommandExtensionReporter;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.vfs.DigestHashFunction;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.BytesValue;
import com.google.protobuf.Int32Value;
import com.google.protobuf.StringValue;
import io.grpc.ManagedChannel;
import io.grpc.Server;
import io.grpc.inprocess.InProcessChannelBuilder;
import io.grpc.inprocess.InProcessServerBuilder;
import io.grpc.stub.ServerCallStreamObserver;
import io.grpc.stub.StreamObserver;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public final class GrpcServerTest {
private static final int SERVER_PID = 42;
private static final String REQUEST_COOKIE = "request-cookie";
private final FileSystem fileSystem = new InMemoryFileSystem(DigestHashFunction.SHA256);
private Server server;
private ManagedChannel channel;
private void createServer(CommandDispatcher dispatcher) throws Exception {
Path serverDirectory = fileSystem.getPath("/bazel_server_directory");
serverDirectory.createDirectoryAndParents();
GrpcServerImpl serverImpl =
new GrpcServerImpl(
dispatcher,
ShutdownHooks.createUnregistered(),
new PidFileWatcher(fileSystem.getPath("/thread-not-running-dont-need"), SERVER_PID),
new JavaClock(),
-1,
REQUEST_COOKIE,
"response-cookie",
serverDirectory,
SERVER_PID,
1000,
false,
false,
"slow interrupt message suffix");
String uniqueName = InProcessServerBuilder.generateName();
server =
InProcessServerBuilder.forName(uniqueName)
.directExecutor()
.addService(serverImpl)
.build()
.start();
channel = InProcessChannelBuilder.forName(uniqueName).directExecutor().build();
}
private static RunRequest createRequest(String... args) {
return RunRequest.newBuilder()
.setCookie(REQUEST_COOKIE)
.setClientDescription("client-description")
.addAllArg(Arrays.stream(args).map(ByteString::copyFromUtf8).collect(Collectors.toList()))
.build();
}
private static RunRequest createPreemptibleRequest(String... args) {
return RunRequest.newBuilder()
.setCookie(REQUEST_COOKIE)
.setClientDescription("client-description")
.setPreemptible(true)
.addAllArg(Arrays.stream(args).map(ByteString::copyFromUtf8).collect(Collectors.toList()))
.build();
}
@Test
public void testSendingSimpleMessage() throws Exception {
Any commandExtension = Any.pack(EnvironmentVariable.getDefaultInstance());
AtomicReference<List<String>> argsReceived = new AtomicReference<>();
AtomicReference<List<Any>> commandExtensionsReceived = new AtomicReference<>();
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter) {
argsReceived.set(args);
commandExtensionsReceived.set(commandExtensions);
return BlazeCommandResult.success();
}
};
createServer(dispatcher);
CountDownLatch done = new CountDownLatch(1);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
List<RunResponse> responses = new ArrayList<>();
stub.run(
createRequest("Foo").toBuilder().addCommandExtensions(commandExtension).build(),
createResponseObserver(responses, done));
done.await();
server.shutdown();
server.awaitTermination();
assertThat(argsReceived.get()).containsExactly("Foo");
assertThat(commandExtensionsReceived.get()).containsExactly(commandExtension);
assertThat(responses).hasSize(2);
assertThat(responses.get(0).getFinished()).isFalse();
assertThat(responses.get(0).getCookie()).isNotEmpty();
assertThat(responses.get(1).getFinished()).isTrue();
assertThat(responses.get(1).getExitCode()).isEqualTo(0);
assertThat(responses.get(1).hasFailureDetail()).isFalse();
}
@Test
public void testReceiveStreamingCommandExtensions() throws Exception {
Any commandExtension1 = Any.pack(Int32Value.of(4));
Any commandExtension2 = Any.pack(Int32Value.of(8));
Any commandExtension3 = Any.pack(Int32Value.of(15));
CountDownLatch afterFirstExtensionLatch = new CountDownLatch(1);
CountDownLatch beforeSecondExtensionLatch = new CountDownLatch(1);
CountDownLatch afterSecondExtensionLatch = new CountDownLatch(1);
CountDownLatch beforeThirdExtensionLatch = new CountDownLatch(1);
CountDownLatch afterThirdExtensionLatch = new CountDownLatch(1);
CommandDispatcher dispatcher =
(policy,
args,
outErr,
lockMode,
uiVerbosity,
clientDesc,
startMs,
startOpts,
cmdExts,
cmdExtOut) -> {
cmdExtOut.report(commandExtension1);
afterFirstExtensionLatch.countDown();
beforeSecondExtensionLatch.await(WAIT_TIMEOUT_SECONDS, SECONDS);
cmdExtOut.report(commandExtension2);
afterSecondExtensionLatch.countDown();
beforeThirdExtensionLatch.await(WAIT_TIMEOUT_SECONDS, SECONDS);
cmdExtOut.report(commandExtension3);
afterThirdExtensionLatch.countDown();
return BlazeCommandResult.success();
};
createServer(dispatcher);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
List<RunResponse> responses = new ArrayList<>();
CountDownLatch done = new CountDownLatch(1);
stub.run(createRequest("Foo"), createResponseObserver(responses, done));
afterFirstExtensionLatch.await();
assertThat(Iterables.getLast(responses).getCommandExtensionsList())
.containsExactly(commandExtension1);
beforeSecondExtensionLatch.countDown();
afterSecondExtensionLatch.await();
assertThat(Iterables.getLast(responses).getCommandExtensionsList())
.containsExactly(commandExtension2);
beforeThirdExtensionLatch.countDown();
afterThirdExtensionLatch.await();
done.await();
assertThat(responses.get(responses.size() - 2).getCommandExtensionsList())
.containsExactly(commandExtension3);
server.shutdown();
server.awaitTermination();
}
@Test
public void testClosingClientShouldInterrupt() throws Exception {
CountDownLatch done = new CountDownLatch(1);
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter) {
synchronized (this) {
assertThrows(InterruptedException.class, this::wait);
}
done.countDown();
return BlazeCommandResult.failureDetail(
FailureDetail.newBuilder()
.setInterrupted(Interrupted.newBuilder().setCode(Code.INTERRUPTED_UNKNOWN))
.build());
}
};
createServer(dispatcher);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
createRequest("Foo"),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
server.shutdownNow();
done.countDown();
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
server.awaitTermination();
done.await();
}
@Test
public void testStream() throws Exception {
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter) {
OutputStream out = outErr.getOutputStream();
try {
commandExtensionReporter.report(Any.pack(Int32Value.of(23)));
for (int i = 0; i < 10; i++) {
out.write(new byte[1024]);
}
commandExtensionReporter.report(Any.pack(Int32Value.of(42)));
} catch (IOException e) {
throw new IllegalStateException(e);
}
return BlazeCommandResult.withResponseExtensions(
BlazeCommandResult.success(),
ImmutableList.of(
Any.pack(StringValue.of("foo")),
Any.pack(BytesValue.of(ByteString.copyFromUtf8("bar")))),
true);
}
};
createServer(dispatcher);
CountDownLatch done = new CountDownLatch(1);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
List<RunResponse> responses = new ArrayList<>();
stub.run(createRequest("Foo"), createResponseObserver(responses, done));
done.await();
server.shutdown();
server.awaitTermination();
assertThat(responses).hasSize(14);
assertThat(responses.get(0).getFinished()).isFalse();
assertThat(responses.get(0).getCookie()).isNotEmpty();
assertThat(responses.get(1).getFinished()).isFalse();
assertThat(responses.get(1).getCookie()).isNotEmpty();
assertThat(responses.get(1).getCommandExtensionsList())
.containsExactly(Any.pack(Int32Value.of(23)));
for (int i = 2; i < 12; i++) {
assertThat(responses.get(i).getFinished()).isFalse();
assertThat(responses.get(i).getStandardOutput().toByteArray()).isEqualTo(new byte[1024]);
assertThat(responses.get(i).getCommandExtensionsList()).isEmpty();
}
assertThat(responses.get(12).getFinished()).isFalse();
assertThat(responses.get(12).getCookie()).isNotEmpty();
assertThat(responses.get(12).getCommandExtensionsList())
.containsExactly(Any.pack(Int32Value.of(42)));
assertThat(responses.get(13).getFinished()).isTrue();
assertThat(responses.get(13).getExitCode()).isEqualTo(0);
assertThat(responses.get(13).hasFailureDetail()).isFalse();
assertThat(responses.get(13).getCommandExtensionsList())
.containsExactly(
Any.pack(StringValue.of("foo")),
Any.pack(BytesValue.of(ByteString.copyFromUtf8("bar"))));
}
@Test
public void badCookie() throws Exception {
runBadCommandTest(
RunRequest.newBuilder().setCookie("bad-cookie").setClientDescription("client-description"),
FailureDetail.newBuilder()
.setMessage("Invalid RunRequest: bad cookie")
.setGrpcServer(GrpcServer.newBuilder().setCode(GrpcServer.Code.BAD_COOKIE))
.build());
}
@Test
public void emptyClientDescription() throws Exception {
runBadCommandTest(
RunRequest.newBuilder().setCookie(REQUEST_COOKIE).setClientDescription(""),
FailureDetail.newBuilder()
.setMessage("Invalid RunRequest: no client description")
.setGrpcServer(GrpcServer.newBuilder().setCode(GrpcServer.Code.NO_CLIENT_DESCRIPTION))
.build());
}
private void runBadCommandTest(RunRequest.Builder runRequestBuilder, FailureDetail failureDetail)
throws Exception {
createServer(throwingDispatcher());
CountDownLatch done = new CountDownLatch(1);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
List<RunResponse> responses = new ArrayList<>();
stub.run(
runRequestBuilder.addArg(ByteString.copyFromUtf8("Foo")).build(),
createResponseObserver(responses, done));
done.await();
server.shutdown();
server.awaitTermination();
assertThat(responses).hasSize(1);
assertThat(responses.get(0).getFinished()).isTrue();
assertThat(responses.get(0).getExitCode()).isEqualTo(36);
assertThat(responses.get(0).hasFailureDetail()).isTrue();
assertThat(responses.get(0).getFailureDetail()).isEqualTo(failureDetail);
}
@Test
public void unparseableInvocationPolicy() throws Exception {
createServer(throwingDispatcher());
CountDownLatch done = new CountDownLatch(1);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
List<RunResponse> responses = new ArrayList<>();
stub.run(
RunRequest.newBuilder()
.setCookie(REQUEST_COOKIE)
.setClientDescription("client-description")
.setInvocationPolicy("invalid-invocation-policy")
.addArg(ByteString.copyFromUtf8("Foo"))
.build(),
createResponseObserver(responses, done));
done.await();
server.shutdown();
server.awaitTermination();
assertThat(responses).hasSize(3);
assertThat(responses.get(2).getFinished()).isTrue();
assertThat(responses.get(2).getExitCode()).isEqualTo(2);
assertThat(responses.get(2).hasFailureDetail()).isTrue();
assertThat(responses.get(2).getFailureDetail())
.isEqualTo(
FailureDetail.newBuilder()
.setMessage(
"Invocation policy parsing failed: Malformed value of --invocation_policy: "
+ "invalid-invocation-policy")
.setCommand(
Command.newBuilder().setCode(Command.Code.INVOCATION_POLICY_PARSE_FAILURE))
.build());
}
@Test
public void testInterruptStream() throws Exception {
CountDownLatch done = new CountDownLatch(1);
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter) {
OutputStream out = outErr.getOutputStream();
try {
while (true) {
if (Thread.interrupted()) {
return BlazeCommandResult.failureDetail(
FailureDetail.newBuilder()
.setInterrupted(
Interrupted.newBuilder().setCode(Code.INTERRUPTED_UNKNOWN))
.build());
}
out.write(new byte[1024]);
}
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
};
createServer(dispatcher);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
List<RunResponse> responses = new ArrayList<>();
stub.run(
createRequest("Foo"),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
responses.add(value);
if (responses.size() == 10) {
server.shutdownNow();
}
}
@Override
public void onError(Throwable t) {
done.countDown();
}
@Override
public void onCompleted() {
done.countDown();
}
});
server.awaitTermination();
done.await();
}
@Test
public void testCancel() throws Exception {
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter)
throws InterruptedException {
synchronized (this) {
this.wait();
}
throw new IllegalStateException();
}
};
createServer(dispatcher);
AtomicReference<String> commandId = new AtomicReference<>();
CountDownLatch gotCommandId = new CountDownLatch(1);
AtomicReference<RunResponse> secondResponse = new AtomicReference<>();
CountDownLatch gotSecondResponse = new CountDownLatch(1);
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
createRequest("Foo"),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
String previousCommandId = commandId.getAndSet(value.getCommandId());
if (previousCommandId == null) {
gotCommandId.countDown();
} else {
secondResponse.set(value);
gotSecondResponse.countDown();
}
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
gotCommandId.await();
CountDownLatch cancelRequestComplete = new CountDownLatch(1);
CancelRequest [MASK] =
CancelRequest.newBuilder().setCookie(REQUEST_COOKIE).setCommandId(commandId.get()).build();
stub.cancel(
[MASK],
new StreamObserver<CancelResponse>() {
@Override
public void onNext(CancelResponse value) {}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {
cancelRequestComplete.countDown();
}
});
cancelRequestComplete.await();
gotSecondResponse.await();
server.shutdown();
server.awaitTermination();
assertThat(secondResponse.get().getFinished()).isTrue();
assertThat(secondResponse.get().getExitCode()).isEqualTo(8);
assertThat(secondResponse.get().hasFailureDetail()).isTrue();
assertThat(secondResponse.get().getFailureDetail().hasInterrupted()).isTrue();
assertThat(secondResponse.get().getFailureDetail().getInterrupted().getCode())
.isEqualTo(Code.INTERRUPTED);
}
@Test
public void testPreeempt() throws Exception {
String firstCommandArg = "Foo";
String secondCommandArg = "Bar";
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter) {
if (args.contains(firstCommandArg)) {
while (true) {
try {
Thread.sleep(TestUtils.WAIT_TIMEOUT_MILLISECONDS);
} catch (InterruptedException e) {
return BlazeCommandResult.failureDetail(
FailureDetail.newBuilder()
.setInterrupted(Interrupted.newBuilder().setCode(Code.INTERRUPTED))
.build());
}
}
} else {
return BlazeCommandResult.success();
}
}
};
createServer(dispatcher);
CountDownLatch gotFoo = new CountDownLatch(1);
AtomicReference<RunResponse> lastFooResponse = new AtomicReference<>();
AtomicReference<RunResponse> lastBarResponse = new AtomicReference<>();
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
createPreemptibleRequest(firstCommandArg),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
gotFoo.countDown();
lastFooResponse.set(value);
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
gotFoo.await();
CountDownLatch gotBar = new CountDownLatch(1);
stub.run(
createRequest(secondCommandArg),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
gotBar.countDown();
lastBarResponse.set(value);
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
gotBar.await();
server.shutdown();
server.awaitTermination();
assertThat(lastBarResponse.get().getFinished()).isTrue();
assertThat(lastBarResponse.get().getExitCode()).isEqualTo(0);
assertThat(lastFooResponse.get().getFinished()).isTrue();
assertThat(lastFooResponse.get().getExitCode()).isEqualTo(8);
assertThat(lastFooResponse.get().hasFailureDetail()).isTrue();
assertThat(lastFooResponse.get().getFailureDetail().hasInterrupted()).isTrue();
assertThat(lastFooResponse.get().getFailureDetail().getInterrupted().getCode())
.isEqualTo(Code.INTERRUPTED);
}
@Test
public void testMultiPreeempt() throws Exception {
String firstCommandArg = "Foo";
String secondCommandArg = "Bar";
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter)
throws InterruptedException {
if (args.contains(firstCommandArg)) {
while (true) {
try {
Thread.sleep(TestUtils.WAIT_TIMEOUT_MILLISECONDS);
} catch (InterruptedException e) {
return BlazeCommandResult.failureDetail(
FailureDetail.newBuilder()
.setInterrupted(Interrupted.newBuilder().setCode(Code.INTERRUPTED))
.build());
}
}
} else {
return BlazeCommandResult.success();
}
}
};
createServer(dispatcher);
CountDownLatch gotFoo = new CountDownLatch(1);
AtomicReference<RunResponse> lastFooResponse = new AtomicReference<>();
AtomicReference<RunResponse> lastBarResponse = new AtomicReference<>();
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
createPreemptibleRequest(firstCommandArg),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
gotFoo.countDown();
lastFooResponse.set(value);
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
gotFoo.await();
CountDownLatch gotBar = new CountDownLatch(1);
stub.run(
createPreemptibleRequest(secondCommandArg),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
gotBar.countDown();
lastBarResponse.set(value);
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
gotBar.await();
server.shutdown();
server.awaitTermination();
assertThat(lastBarResponse.get().getFinished()).isTrue();
assertThat(lastBarResponse.get().getExitCode()).isEqualTo(0);
assertThat(lastFooResponse.get().getFinished()).isTrue();
assertThat(lastFooResponse.get().getExitCode()).isEqualTo(8);
assertThat(lastFooResponse.get().hasFailureDetail()).isTrue();
assertThat(lastFooResponse.get().getFailureDetail().hasInterrupted()).isTrue();
assertThat(lastFooResponse.get().getFailureDetail().getInterrupted().getCode())
.isEqualTo(Code.INTERRUPTED);
}
@Test
public void testNoPreeempt() throws Exception {
String firstCommandArg = "Foo";
String secondCommandArg = "Bar";
CountDownLatch fooBlocked = new CountDownLatch(1);
CountDownLatch fooProceed = new CountDownLatch(1);
CountDownLatch barBlocked = new CountDownLatch(1);
CountDownLatch barProceed = new CountDownLatch(1);
CommandDispatcher dispatcher =
new CommandDispatcher() {
@Override
public BlazeCommandResult exec(
InvocationPolicy invocationPolicy,
List<String> args,
OutErr outErr,
LockingMode lockingMode,
UiVerbosity uiVerbosity,
String clientDescription,
long firstContactTimeMillis,
Optional<List<Pair<String, String>>> startupOptionsTaggedWithBazelRc,
List<Any> commandExtensions,
CommandExtensionReporter commandExtensionReporter)
throws InterruptedException {
if (args.contains(firstCommandArg)) {
fooBlocked.countDown();
fooProceed.await();
} else {
barBlocked.countDown();
barProceed.await();
}
return BlazeCommandResult.success();
}
};
createServer(dispatcher);
AtomicReference<RunResponse> lastFooResponse = new AtomicReference<>();
AtomicReference<RunResponse> lastBarResponse = new AtomicReference<>();
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
createRequest(firstCommandArg),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
lastFooResponse.set(value);
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
fooBlocked.await();
stub.run(
createRequest(secondCommandArg),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
lastBarResponse.set(value);
}
@Override
public void onError(Throwable t) {}
@Override
public void onCompleted() {}
});
barBlocked.await();
fooProceed.countDown();
barProceed.countDown();
server.shutdown();
server.awaitTermination();
assertThat(lastFooResponse.get().getFinished()).isTrue();
assertThat(lastFooResponse.get().getExitCode()).isEqualTo(0);
assertThat(lastBarResponse.get().getFinished()).isTrue();
assertThat(lastBarResponse.get().getExitCode()).isEqualTo(0);
}
@Test
public void testFlowControl() throws Exception {
CountDownLatch serverDone = new CountDownLatch(1);
CountDownLatch clientBlocks = new CountDownLatch(1);
CountDownLatch clientUnblocks = new CountDownLatch(1);
CountDownLatch clientDone = new CountDownLatch(1);
AtomicInteger sentCount = new AtomicInteger();
AtomicInteger receiveCount = new AtomicInteger();
CommandServerGrpc.CommandServerImplBase serverImpl =
new CommandServerGrpc.CommandServerImplBase() {
@Override
public void run(RunRequest request, StreamObserver<RunResponse> observer) {
ServerCallStreamObserver<RunResponse> serverCallStreamObserver =
(ServerCallStreamObserver<RunResponse>) observer;
BlockingStreamObserver<RunResponse> blockingStreamObserver =
new BlockingStreamObserver<>(serverCallStreamObserver);
Thread t =
new Thread(
() -> {
RunResponse response =
RunResponse.newBuilder()
.setStandardOutput(ByteString.copyFrom(new byte[1024]))
.build();
for (int i = 0; i < 100; i++) {
blockingStreamObserver.onNext(response);
sentCount.incrementAndGet();
}
blockingStreamObserver.onCompleted();
serverDone.countDown();
});
t.start();
}
};
String uniqueName = InProcessServerBuilder.generateName();
server =
InProcessServerBuilder.forName(uniqueName)
.addService(serverImpl)
.executor(Executors.newFixedThreadPool(4))
.build()
.start();
channel =
InProcessChannelBuilder.forName(uniqueName)
.executor(Executors.newFixedThreadPool(4))
.build();
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
RunRequest.getDefaultInstance(),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
if (sentCount.get() >= 3) {
clientBlocks.countDown();
try {
clientUnblocks.await();
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
receiveCount.incrementAndGet();
}
@Override
public void onError(Throwable t) {
throw new IllegalStateException(t);
}
@Override
public void onCompleted() {
clientDone.countDown();
}
});
clientBlocks.await();
Thread.sleep(10);
assertThat(sentCount.get()).isLessThan(5);
clientUnblocks.countDown();
serverDone.await();
clientDone.await();
server.shutdown();
server.awaitTermination();
}
@Test
public void testFlowControlClientCancel() throws Exception {
CountDownLatch serverDone = new CountDownLatch(1);
CountDownLatch clientDone = new CountDownLatch(1);
AtomicInteger sentCount = new AtomicInteger();
AtomicInteger receiveCount = new AtomicInteger();
CommandServerGrpc.CommandServerImplBase serverImpl =
new CommandServerGrpc.CommandServerImplBase() {
@Override
public void run(RunRequest request, StreamObserver<RunResponse> observer) {
ServerCallStreamObserver<RunResponse> serverCallStreamObserver =
(ServerCallStreamObserver<RunResponse>) observer;
BlockingStreamObserver<RunResponse> blockingStreamObserver =
new BlockingStreamObserver<>(serverCallStreamObserver);
Thread t =
new Thread(
() -> {
RunResponse response =
RunResponse.newBuilder()
.setStandardOutput(ByteString.copyFrom(new byte[1024]))
.build();
for (int i = 0; i < 100; i++) {
blockingStreamObserver.onNext(response);
sentCount.incrementAndGet();
}
assertThat(Thread.currentThread().isInterrupted()).isTrue();
blockingStreamObserver.onCompleted();
serverDone.countDown();
});
t.start();
}
};
String uniqueName = InProcessServerBuilder.generateName();
server =
InProcessServerBuilder.forName(uniqueName)
.addService(serverImpl)
.executor(Executors.newFixedThreadPool(4))
.build()
.start();
channel =
InProcessChannelBuilder.forName(uniqueName)
.executor(Executors.newFixedThreadPool(4))
.build();
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
RunRequest.getDefaultInstance(),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
if (receiveCount.get() > 3) {
channel.shutdownNow();
}
receiveCount.incrementAndGet();
}
@Override
public void onError(Throwable t) {
clientDone.countDown();
}
@Override
public void onCompleted() {
clientDone.countDown();
}
});
serverDone.await();
clientDone.await();
server.shutdown();
server.awaitTermination();
}
@Test
public void testInterruptFlowControl() throws Exception {
CountDownLatch serverDone = new CountDownLatch(1);
CountDownLatch clientDone = new CountDownLatch(1);
AtomicInteger sentCount = new AtomicInteger();
AtomicInteger receiveCount = new AtomicInteger();
CommandServerGrpc.CommandServerImplBase serverImpl =
new CommandServerGrpc.CommandServerImplBase() {
@Override
public void run(RunRequest request, StreamObserver<RunResponse> observer) {
ServerCallStreamObserver<RunResponse> serverCallStreamObserver =
(ServerCallStreamObserver<RunResponse>) observer;
BlockingStreamObserver<RunResponse> blockingStreamObserver =
new BlockingStreamObserver<>(serverCallStreamObserver);
Thread t =
new Thread(
() -> {
RunResponse response =
RunResponse.newBuilder()
.setStandardOutput(ByteString.copyFrom(new byte[1024]))
.build();
int sent = 0;
while (serverCallStreamObserver.isReady()) {
blockingStreamObserver.onNext(response);
sent++;
}
sentCount.set(sent);
Thread.currentThread().interrupt();
for (int i = 0; i < 10; i++) {
blockingStreamObserver.onNext(response);
sentCount.incrementAndGet();
}
blockingStreamObserver.onCompleted();
serverDone.countDown();
});
t.start();
}
};
String uniqueName = InProcessServerBuilder.generateName();
server =
InProcessServerBuilder.forName(uniqueName)
.addService(serverImpl)
.executor(Executors.newFixedThreadPool(4))
.build()
.start();
channel =
InProcessChannelBuilder.forName(uniqueName)
.executor(Executors.newFixedThreadPool(4))
.build();
CommandServerStub stub = CommandServerGrpc.newStub(channel);
stub.run(
RunRequest.getDefaultInstance(),
new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
if (sentCount.get() == 0) {
try {
Thread.sleep(1);
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
receiveCount.incrementAndGet();
}
@Override
public void onError(Throwable t) {
throw new IllegalStateException(t);
}
@Override
public void onCompleted() {
clientDone.countDown();
}
});
serverDone.await();
clientDone.await();
assertThat(sentCount.get()).isEqualTo(receiveCount.get());
server.shutdown();
server.awaitTermination();
}
private static StreamObserver<RunResponse> createResponseObserver(
List<RunResponse> responses, CountDownLatch done) {
return new StreamObserver<RunResponse>() {
@Override
public void onNext(RunResponse value) {
responses.add(value);
}
@Override
public void onError(Throwable t) {
done.countDown();
}
@Override
public void onCompleted() {
done.countDown();
}
};
}
private static CommandDispatcher throwingDispatcher() {
return (invocationPolicy,
args,
outErr,
lockingMode,
uiVerbosity,
clientDescription,
firstContactTimeMillis,
startupOptionsTaggedWithBazelRc,
commandExtensions,
commandExtensionReporter) -> {
throw new IllegalStateException("Command exec not expected");
};
}
} | cancelRequest | java | bazel |
package org.elasticsearch.xpack.esql.core.expression.predicate;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.expression.FoldContext;
import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.esql.core.tree.Source;
import java.util.Objects;
public abstract class BinaryPredicate<T, U, R, F extends PredicateBiFunction<T, U, R>> extends BinaryScalarFunction {
private final F function;
protected BinaryPredicate(Source [MASK], Expression left, Expression right, F function) {
super([MASK], left, right);
this.function = function;
}
@SuppressWarnings("unchecked")
@Override
public R fold(FoldContext ctx) {
return function().apply((T) left().fold(ctx), (U) right().fold(ctx));
}
@Override
public int hashCode() {
return Objects.hash(left(), right(), function.symbol());
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
BinaryPredicate<?, ?, ?, ?> other = (BinaryPredicate<?, ?, ?, ?>) obj;
return Objects.equals(symbol(), other.symbol()) && Objects.equals(left(), other.left()) && Objects.equals(right(), other.right());
}
public String symbol() {
return function.symbol();
}
public F function() {
return function;
}
@Override
public String nodeString() {
return left().nodeString() + " " + symbol() + " " + right().nodeString();
}
} | source | java | elasticsearch |
package org.elasticsearch.xpack.inference.services.ibmwatsonx.action;
import org.elasticsearch.xpack.inference.external.action.ExecutableAction;
import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel;
import org.elasticsearch.xpack.inference.services.ibmwatsonx.rerank.IbmWatsonxRerankModel;
import java.util.Map;
public interface IbmWatsonxActionVisitor {
ExecutableAction create(IbmWatsonxEmbeddingsModel [MASK], Map<String, Object> taskSettings);
ExecutableAction create(IbmWatsonxRerankModel [MASK], Map<String, Object> taskSettings);
} | model | java | elasticsearch |
package org.springframework.boot.buildpack.platform.docker.configuration;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.skyscreamer.jsonassert.JSONAssert;
import org.springframework.boot.buildpack.platform.json.AbstractJsonTests;
import org.springframework.util.StreamUtils;
class DockerRegistryTokenAuthenticationTests extends AbstractJsonTests {
@Test
void createAuthHeaderReturnsEncodedHeader() throws IOException, JSONException {
DockerRegistryTokenAuthentication [MASK] = new DockerRegistryTokenAuthentication("tokenvalue");
String header = [MASK].getAuthHeader();
String expectedJson = StreamUtils.copyToString(getContent("[MASK]-token.json"), StandardCharsets.UTF_8);
JSONAssert.assertEquals(expectedJson, new String(Base64.getUrlDecoder().decode(header)), true);
}
} | auth | java | spring-boot |
package sun.nio.ch;
import java.nio.channels.*;
import java.util.concurrent.*;
import java.io.IOException;
import java.io.FileDescriptor;
import java.net.InetSocketAddress;
import java.util.concurrent.atomic.AtomicBoolean;
import java.security.AccessControlContext;
import dalvik.system.CloseGuard;
import libcore.io.OsConstants;
class UnixAsynchronousServerSocketChannelImpl
extends AsynchronousServerSocketChannelImpl
implements Port.PollableChannel
{
private final static NativeDispatcher nd = new SocketDispatcher();
private final Port port;
private final int fdVal;
private final AtomicBoolean accepting = new AtomicBoolean();
private void enableAccept() {
accepting.set(false);
}
private final Object updateLock = new Object();
private boolean acceptPending;
private CompletionHandler<AsynchronousSocketChannel,Object> acceptHandler;
private Object acceptAttachment;
private PendingFuture<AsynchronousSocketChannel,Object> acceptFuture;
private AccessControlContext acceptAcc;
private final CloseGuard guard = CloseGuard.get();
UnixAsynchronousServerSocketChannelImpl(Port port)
throws IOException
{
super(port);
try {
IOUtil.configureBlocking(fd, false);
} catch (IOException x) {
nd.close(fd);
throw x;
}
this.port = port;
this.fdVal = IOUtil.fdVal(fd);
port.register(fdVal, this);
guard.open("close");
}
@Override
void implClose() throws IOException {
guard.close();
port.unregister(fdVal);
nd.close(fd);
CompletionHandler<AsynchronousSocketChannel,Object> handler;
Object att;
PendingFuture<AsynchronousSocketChannel,Object> future;
synchronized (updateLock) {
if (!acceptPending)
return;
acceptPending = false;
handler = acceptHandler;
att = acceptAttachment;
future = acceptFuture;
}
AsynchronousCloseException x = new AsynchronousCloseException();
x.setStackTrace(new StackTraceElement[0]);
if (handler == null) {
future.setFailure(x);
} else {
Invoker.invokeIndirectly(this, handler, att, null, x);
}
}
protected void finalize() throws Throwable {
try {
if (guard != null) {
guard.warnIfOpen();
}
close();
} finally {
super.finalize();
}
}
@Override
public AsynchronousChannelGroupImpl group() {
return port;
}
@Override
public void onEvent(int events, boolean mayInvokeDirect) {
synchronized (updateLock) {
if (!acceptPending)
return;
acceptPending = false;
}
FileDescriptor newfd = new FileDescriptor();
InetSocketAddress[] [MASK] = new InetSocketAddress[1];
Throwable exc = null;
try {
begin();
int n = accept(this.fd, newfd, [MASK]);
if (n == IOStatus.UNAVAILABLE) {
synchronized (updateLock) {
acceptPending = true;
}
port.startPoll(fdVal, Net.POLLIN);
return;
}
} catch (Throwable x) {
if (x instanceof ClosedChannelException)
x = new AsynchronousCloseException();
exc = x;
} finally {
end();
}
AsynchronousSocketChannel child = null;
if (exc == null) {
try {
child = finishAccept(newfd, [MASK][0], acceptAcc);
} catch (Throwable x) {
if (!(x instanceof IOException) && !(x instanceof SecurityException))
x = new IOException(x);
exc = x;
}
}
CompletionHandler<AsynchronousSocketChannel,Object> handler = acceptHandler;
Object att = acceptAttachment;
PendingFuture<AsynchronousSocketChannel,Object> future = acceptFuture;
enableAccept();
if (handler == null) {
future.setResult(child, exc);
if (child != null && future.isCancelled()) {
try {
child.close();
} catch (IOException ignore) { }
}
} else {
Invoker.invoke(this, handler, att, child, exc);
}
}
private AsynchronousSocketChannel finishAccept(FileDescriptor newfd,
final InetSocketAddress remote,
AccessControlContext acc)
throws IOException, SecurityException
{
AsynchronousSocketChannel ch = null;
try {
ch = new UnixAsynchronousSocketChannelImpl(port, newfd, remote);
} catch (IOException x) {
nd.close(newfd);
throw x;
}
return ch;
}
@Override
Future<AsynchronousSocketChannel> implAccept(Object att,
CompletionHandler<AsynchronousSocketChannel,Object> handler)
{
if (!isOpen()) {
Throwable e = new ClosedChannelException();
if (handler == null) {
return CompletedFuture.withFailure(e);
} else {
Invoker.invoke(this, handler, att, null, e);
return null;
}
}
if (localAddress == null)
throw new NotYetBoundException();
if (isAcceptKilled())
throw new RuntimeException("Accept not allowed due cancellation");
if (!accepting.compareAndSet(false, true))
throw new AcceptPendingException();
FileDescriptor newfd = new FileDescriptor();
InetSocketAddress[] [MASK] = new InetSocketAddress[1];
Throwable exc = null;
try {
begin();
int n = accept(this.fd, newfd, [MASK]);
if (n == IOStatus.UNAVAILABLE) {
PendingFuture<AsynchronousSocketChannel,Object> result = null;
synchronized (updateLock) {
if (handler == null) {
this.acceptHandler = null;
result = new PendingFuture<AsynchronousSocketChannel,Object>(this);
this.acceptFuture = result;
} else {
this.acceptHandler = handler;
this.acceptAttachment = att;
}
this.acceptAcc = null;
this.acceptPending = true;
}
port.startPoll(fdVal, Net.POLLIN);
return result;
}
} catch (Throwable x) {
if (x instanceof ClosedChannelException)
x = new AsynchronousCloseException();
exc = x;
} finally {
end();
}
AsynchronousSocketChannel child = null;
if (exc == null) {
try {
child = finishAccept(newfd, [MASK][0], null);
} catch (Throwable x) {
exc = x;
}
}
enableAccept();
if (handler == null) {
return CompletedFuture.withResult(child, exc);
} else {
Invoker.invokeIndirectly(this, handler, att, child, exc);
return null;
}
}
private int accept(FileDescriptor ssfd, FileDescriptor newfd,
InetSocketAddress[] [MASK])
throws IOException
{
return accept0(ssfd, newfd, [MASK]);
}
private static native void initIDs();
private native int accept0(FileDescriptor ssfd, FileDescriptor newfd,
InetSocketAddress[] [MASK])
throws IOException;
static {
initIDs();
}
} | isaa | java | j2objc |
package proguard.configuration;
import proguard.*;
import proguard.classfile.*;
import proguard.classfile.attribute.*;
import proguard.classfile.attribute.visitor.*;
import proguard.classfile.editor.*;
import proguard.classfile.[MASK].Instruction;
import proguard.classfile.[MASK].visitor.InstructionVisitor;
import proguard.classfile.util.*;
import proguard.classfile.visitor.*;
import proguard.io.*;
import proguard.optimize.peephole.*;
import proguard.util.MultiValueMap;
import java.io.IOException;
import static proguard.classfile.util.ClassUtil.internalClassName;
public class ConfigurationLoggingAdder
extends SimplifiedVisitor
implements
InstructionVisitor
{
private final Configuration configuration;
private MultiValueMap<String, String> injectedClassMap;
public ConfigurationLoggingAdder(Configuration configuration)
{
this.configuration = configuration;
}
public void execute(ClassPool programClassPool,
ClassPool libraryClassPool,
MultiValueMap<String, String> injectedClassMap )
{
ClassReader classReader =
new ClassReader(false, false, false, null,
new MultiClassVisitor(
new ClassPoolFiller(programClassPool),
new ClassReferenceInitializer(programClassPool, libraryClassPool),
new ClassSubHierarchyInitializer()
));
try
{
classReader.read(new ClassPathDataEntry(ConfigurationLogger.MethodSignature.class));
classReader.read(new ClassPathDataEntry(ConfigurationLogger.class));
}
catch (IOException e)
{
throw new RuntimeException(e);
}
ConfigurationLoggingInstructionSequenceConstants constants =
new ConfigurationLoggingInstructionSequenceConstants(programClassPool,
libraryClassPool);
BranchTargetFinder branchTargetFinder = new BranchTargetFinder();
CodeAttributeEditor codeAttributeEditor = new CodeAttributeEditor();
this.injectedClassMap = injectedClassMap;
programClassPool.classesAccept(
new ClassNameFilter("!proguard/**",
new AllMethodVisitor(
new AllAttributeVisitor(
new PeepholeOptimizer(branchTargetFinder, codeAttributeEditor,
new ConfigurationLoggingInstructionSequencesReplacer(constants.CONSTANTS,
constants.RESOURCE,
branchTargetFinder,
codeAttributeEditor,
this))))));
}
public void visitAnyInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, Instruction [MASK])
{
injectedClassMap.put(clazz.getName(), internalClassName(ConfigurationLogger.class.getName()));
injectedClassMap.put(clazz.getName(), internalClassName(ConfigurationLogger.MethodSignature.class.getName()));
}
} | instruction | java | bazel |
package com.google.devtools.j2objc.translate;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.j2objc.ast.AnnotationTypeDeclaration;
import com.google.devtools.j2objc.ast.ClassInstanceCreation;
import com.google.devtools.j2objc.ast.CompilationUnit;
import com.google.devtools.j2objc.ast.ConstructorInvocation;
import com.google.devtools.j2objc.ast.CreationReference;
import com.google.devtools.j2objc.ast.EnumDeclaration;
import com.google.devtools.j2objc.ast.Expression;
import com.google.devtools.j2objc.ast.ExpressionMethodReference;
import com.google.devtools.j2objc.ast.FieldAccess;
import com.google.devtools.j2objc.ast.FunctionalExpression;
import com.google.devtools.j2objc.ast.LambdaExpression;
import com.google.devtools.j2objc.ast.MethodDeclaration;
import com.google.devtools.j2objc.ast.MethodInvocation;
import com.google.devtools.j2objc.ast.Name;
import com.google.devtools.j2objc.ast.QualifiedName;
import com.google.devtools.j2objc.ast.RecordDeclaration;
import com.google.devtools.j2objc.ast.SimpleName;
import com.google.devtools.j2objc.ast.SingleVariableDeclaration;
import com.google.devtools.j2objc.ast.SuperConstructorInvocation;
import com.google.devtools.j2objc.ast.SuperFieldAccess;
import com.google.devtools.j2objc.ast.SuperMethodInvocation;
import com.google.devtools.j2objc.ast.SuperMethodReference;
import com.google.devtools.j2objc.ast.ThisExpression;
import com.google.devtools.j2objc.ast.TreeUtil;
import com.google.devtools.j2objc.ast.Type;
import com.google.devtools.j2objc.ast.TypeDeclaration;
import com.google.devtools.j2objc.ast.UnitTreeVisitor;
import com.google.devtools.j2objc.ast.VariableDeclaration;
import com.google.devtools.j2objc.ast.VariableDeclarationFragment;
import com.google.devtools.j2objc.util.CaptureInfo;
import com.google.devtools.j2objc.util.ElementUtil;
import com.google.devtools.j2objc.util.TypeUtil;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.TypeMirror;
@SuppressWarnings("UngroupedOverloads")
public class OuterReferenceResolver extends UnitTreeVisitor {
private final CaptureInfo captureInfo;
private Scope topScope = null;
public OuterReferenceResolver(CompilationUnit unit) {
super(unit);
this.captureInfo = unit.getEnv().captureInfo();
}
private enum ScopeKind { CLASS, LAMBDA, METHOD }
private class Scope {
private final ScopeKind kind;
private final Scope outer;
private final Scope outerClass;
private final TypeElement type;
private final Set<Element> inheritedScope;
private final boolean initializingContext;
private final Set<VariableElement> declaredVars = new HashSet<>();
private List<Runnable> onExit = new ArrayList<>();
private final Queue<Runnable> onOuterParam;
private int constructorCount = 0;
private int constructorsNotNeedingSuperOuterScope = 0;
private Scope(Scope outer, TypeElement type) {
kind = ElementUtil.isLambda(type) ? ScopeKind.LAMBDA : ScopeKind.CLASS;
this.outer = outer;
outerClass = firstClassScope(outer);
this.type = type;
ImmutableSet.Builder<Element> inheritedScopeBuilder = ImmutableSet.builder();
if (kind == ScopeKind.CLASS) {
typeUtil.visitTypeHierarchy(type.asType(), inheritedType -> {
inheritedScopeBuilder.add(inheritedType.asElement());
return true;
});
}
this.inheritedScope = inheritedScopeBuilder.build();
this.initializingContext = kind == ScopeKind.CLASS;
this.onOuterParam = new LinkedList<>();
}
private Scope(Scope outer, ExecutableElement method) {
kind = ScopeKind.METHOD;
this.outer = outer;
outerClass = outer.outerClass;
type = outer.type;
inheritedScope = outer.inheritedScope;
initializingContext = ElementUtil.isConstructor(method);
onOuterParam = outer.onOuterParam;
}
private boolean isInitializing() {
return initializingContext && this == peekScope();
}
}
private Scope peekScope() {
assert topScope != null;
return topScope;
}
private static Scope firstClassScope(Scope scope) {
while (scope != null && scope.kind != ScopeKind.CLASS) {
scope = scope.outer;
}
return scope;
}
private Scope findScopeForType(TypeElement type) {
Scope scope = peekScope();
while (scope != null) {
if (scope.kind != ScopeKind.METHOD && type.equals(scope.type)) {
return scope;
}
scope = scope.outer;
}
return null;
}
private Runnable captureCurrentScope(Runnable runnable) {
Scope capturedScope = peekScope();
return new Runnable() {
@Override
public void run() {
Scope saved = topScope;
topScope = capturedScope;
runnable.run();
topScope = saved;
}
};
}
private void onExitScope(TypeElement type, Runnable runnable) {
Scope scope = findScopeForType(type);
if (scope != null) {
scope.onExit.add(captureCurrentScope(runnable));
} else {
runnable.run();
}
}
private void whenNeedsOuterParam(TypeElement type, Runnable runnable) {
if (captureInfo.needsOuterParam(type)) {
runnable.run();
} else if (ElementUtil.isLocal(type)) {
Scope scope = findScopeForType(type);
if (scope != null) {
scope.onOuterParam.add(captureCurrentScope(runnable));
}
}
}
private VariableElement getOrCreateOuterVar(Scope scope) {
while (!scope.onOuterParam.isEmpty()) {
scope.onOuterParam.remove().run();
}
return scope.isInitializing() ? captureInfo.getOrCreateOuterParam(scope.type)
: captureInfo.getOrCreateOuterField(scope.type);
}
private VariableElement getOrCreateCaptureVar(VariableElement var, Scope scope) {
return scope.isInitializing() ? captureInfo.getOrCreateCaptureParam(var, scope.type)
: captureInfo.getOrCreateCaptureField(var, scope.type);
}
private Name getOuterPath(TypeElement type) {
Name path = null;
for (Scope scope = peekScope(); !type.equals(scope.type); scope = scope.outerClass) {
path = Name.newName(path, getOrCreateOuterVar(scope));
}
return path;
}
private Name getOuterPathInherited(TypeElement type) {
Name path = null;
for (Scope scope = peekScope(); !scope.inheritedScope.contains(type);
scope = scope.outerClass) {
path = Name.newName(path, getOrCreateOuterVar(scope));
}
return path;
}
private Name getPathForField(VariableElement var, TypeMirror type) {
Name path = getOuterPathInherited((TypeElement) var.getEnclosingElement());
if (path != null) {
path = Name.newName(path, var, type);
}
return path;
}
private Expression getPathForLocalVar(VariableElement var) {
Name path = null;
Scope scope = peekScope();
if (scope.declaredVars.contains(var)) {
return path;
}
if (var.getConstantValue() != null) {
return TreeUtil.newLiteral(var.getConstantValue(), typeUtil);
}
Scope lastScope = scope;
while (!(scope = scope.outer).declaredVars.contains(var)) {
if (scope == lastScope.outerClass) {
path = Name.newName(path, getOrCreateOuterVar(lastScope));
lastScope = scope;
}
}
return Name.newName(path, getOrCreateCaptureVar(var, lastScope));
}
private void pushType(TypeElement type) {
topScope = new Scope(topScope, type);
}
private void popType() {
Scope [MASK] = peekScope();
topScope = [MASK].outer;
for (Runnable runnable : [MASK].onExit) {
runnable.run();
}
}
private void addSuperOuterPath(TypeDeclaration node) {
TypeElement superclass = ElementUtil.getSuperclass(node.getTypeElement());
if (superclass != null && captureInfo.needsOuterParam(superclass)) {
node.setSuperOuter(getOuterPathInherited(ElementUtil.getDeclaringClass(superclass)));
}
}
private void addCaptureArgs(TypeElement type, List<Expression> args) {
for (VariableElement var : captureInfo.getCapturedVars(type)) {
Expression path = getPathForLocalVar(var);
if (path == null) {
path = new SimpleName(var);
}
args.add(path);
}
}
@Override
public boolean visit(TypeDeclaration node) {
pushType(node.getTypeElement());
return true;
}
@Override
public void endVisit(TypeDeclaration node) {
Scope [MASK] = peekScope();
if ([MASK].constructorCount == 0) {
[MASK].constructorCount++;
}
if ([MASK].constructorCount > [MASK].constructorsNotNeedingSuperOuterScope) {
addSuperOuterPath(node);
}
addCaptureArgs(ElementUtil.getSuperclass(node.getTypeElement()), node.getSuperCaptureArgs());
popType();
}
@Override
public boolean visit(EnumDeclaration node) {
pushType(node.getTypeElement());
return true;
}
@Override
public void endVisit(EnumDeclaration node) {
popType();
}
@Override
public boolean visit(AnnotationTypeDeclaration node) {
pushType(node.getTypeElement());
return true;
}
@Override
public void endVisit(AnnotationTypeDeclaration node) {
popType();
}
@Override
public boolean visit(RecordDeclaration node) {
pushType(node.getTypeElement());
return true;
}
@Override
public void endVisit(RecordDeclaration node) {
popType();
}
private void endVisitFunctionalExpression(FunctionalExpression node) {
TypeElement typeElement = node.getTypeElement();
if (captureInfo.needsOuterParam(typeElement)) {
node.setLambdaOuterArg(getOuterPathInherited(ElementUtil.getDeclaringClass(typeElement)));
}
addCaptureArgs(typeElement, node.getLambdaCaptureArgs());
}
@Override
public boolean visit(LambdaExpression node) {
pushType(node.getTypeElement());
return true;
}
@Override
public void endVisit(LambdaExpression node) {
popType();
endVisitFunctionalExpression(node);
}
@Override
public void endVisit(ExpressionMethodReference node) {
Expression target = node.getExpression();
if (!ElementUtil.isStatic(node.getExecutableElement()) && isValue(target)) {
captureInfo.addMethodReferenceReceiver(node.getTypeElement(), target.getTypeMirror());
}
}
private static boolean isValue(Expression expr) {
return !(expr instanceof Name) || ElementUtil.isVariable(((Name) expr).getElement());
}
@Override
public boolean visit(FieldAccess node) {
node.getExpression().accept(this);
return false;
}
@Override
public boolean visit(SuperFieldAccess node) {
VariableElement var = node.getVariableElement();
Name path = getPathForField(var, node.getTypeMirror());
if (path != null) {
node.replaceWith(path);
}
return false;
}
@Override
public boolean visit(QualifiedName node) {
node.getQualifier().accept(this);
return false;
}
@Override
public boolean visit(SimpleName node) {
VariableElement var = TreeUtil.getVariableElement(node);
if (var != null) {
Expression path = null;
if (ElementUtil.isInstanceVar(var)) {
path = getPathForField(var, node.getTypeMirror());
} else if (!var.getKind().isField()) {
path = getPathForLocalVar(var);
}
if (path != null) {
node.replaceWith(path);
}
}
return true;
}
@Override
public boolean visit(ThisExpression node) {
Name qualifier = TreeUtil.remove(node.getQualifier());
if (qualifier != null) {
Name path = getOuterPath((TypeElement) qualifier.getElement());
if (path != null) {
node.replaceWith(path);
}
} else {
Scope [MASK] = peekScope();
if (ElementUtil.isLambda([MASK].type)) {
Name path = getOuterPath(ElementUtil.getDeclaringClass([MASK].type));
assert path != null : "this keyword within a lambda should have a non-empty path";
node.replaceWith(path);
}
}
return true;
}
@Override
public void endVisit(MethodInvocation node) {
ExecutableElement method = node.getExecutableElement();
if (node.getExpression() == null && !ElementUtil.isStatic(method)) {
node.setExpression(getOuterPathInherited(ElementUtil.getDeclaringClass(method)));
}
}
private Name getSuperPath(Name qualifier, ExecutableElement executableElement) {
if (ElementUtil.isDefault(executableElement)) {
qualifier = null;
}
if (qualifier != null) {
return getOuterPath((TypeElement) qualifier.getElement());
} else {
Scope [MASK] = peekScope();
if (ElementUtil.isLambda([MASK].type)) {
return getOuterPath(ElementUtil.getDeclaringClass([MASK].type));
}
}
return null;
}
@Override
public void endVisit(SuperMethodInvocation node) {
node.setReceiver(getSuperPath(node.getQualifier(), node.getExecutableElement()));
node.setQualifier(null);
}
@Override
public void endVisit(SuperMethodReference node) {
TypeElement lambdaType = node.getTypeElement();
pushType(lambdaType);
Name qualifier = TreeUtil.remove(node.getQualifier());
node.setReceiver(getSuperPath(qualifier, node.getExecutableElement()));
popType();
endVisitFunctionalExpression(node);
}
@Override
public void endVisit(ClassInstanceCreation node) {
TypeElement typeElement = (TypeElement) node.getExecutableElement().getEnclosingElement();
if (node.getExpression() == null) {
whenNeedsOuterParam(typeElement, () -> {
node.setExpression(getOuterPathInherited(ElementUtil.getDeclaringClass(typeElement)));
});
}
if (ElementUtil.isLocal(typeElement)) {
onExitScope(typeElement, () -> {
addCaptureArgs(typeElement, node.getCaptureArgs());
});
}
}
@Override
public void endVisit(CreationReference node) {
Type typeNode = node.getType();
TypeMirror creationType = typeNode.getTypeMirror();
if (TypeUtil.isArray(creationType)) {
return;
}
TypeElement lambdaType = node.getTypeElement();
pushType(lambdaType);
TypeElement creationElement = TypeUtil.asTypeElement(creationType);
whenNeedsOuterParam(creationElement, () -> {
TypeElement enclosingTypeElement = ElementUtil.getDeclaringClass(creationElement);
node.setCreationOuterArg(getOuterPathInherited(enclosingTypeElement));
});
if (ElementUtil.isLocal(creationElement)) {
onExitScope(creationElement, () -> {
addCaptureArgs(creationElement, node.getCreationCaptureArgs());
});
}
popType();
endVisitFunctionalExpression(node);
}
private boolean visitVariableDeclaration(VariableDeclaration node) {
peekScope().declaredVars.add(node.getVariableElement());
return true;
}
@Override
public boolean visit(VariableDeclarationFragment node) {
return visitVariableDeclaration(node);
}
@Override
public boolean visit(SingleVariableDeclaration node) {
return visitVariableDeclaration(node);
}
@Override
public boolean visit(MethodDeclaration node) {
Scope [MASK] = peekScope();
ExecutableElement elem = node.getExecutableElement();
if (ElementUtil.isConstructor(elem)) {
[MASK].constructorCount++;
}
topScope = new Scope([MASK], elem);
return true;
}
@Override
public void endVisit(MethodDeclaration node) {
topScope = topScope.outer;
}
@Override
public void endVisit(ConstructorInvocation node) {
firstClassScope(peekScope()).constructorsNotNeedingSuperOuterScope++;
}
@Override
public void endVisit(SuperConstructorInvocation node) {
if (node.getExpression() != null) {
firstClassScope(peekScope()).constructorsNotNeedingSuperOuterScope++;
}
}
} | currentScope | java | j2objc |
package org.elasticsearch.xpack.spatial.datageneration;
import org.elasticsearch.geo.GeometryTestUtils;
import org.elasticsearch.[MASK].Geometry;
import org.elasticsearch.[MASK].ShapeType;
import org.elasticsearch.logsdb.datageneration.datasource.DataSourceHandler;
import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest;
import org.elasticsearch.logsdb.datageneration.datasource.DataSourceResponse;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.spatial.util.GeoTestUtils;
import java.util.HashMap;
public class ShapeDataSourceHandler implements DataSourceHandler {
@Override
public DataSourceResponse.ShapeGenerator handle(DataSourceRequest.ShapeGenerator request) {
return new DataSourceResponse.ShapeGenerator(this::generateValidShape);
}
@Override
public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceRequest.LeafMappingParametersGenerator request) {
if (request.fieldType().equals("shape") == false) {
return null;
}
return new DataSourceResponse.LeafMappingParametersGenerator(() -> {
var map = new HashMap<String, Object>();
map.put("index", ESTestCase.randomBoolean());
map.put("doc_values", ESTestCase.randomBoolean());
if (ESTestCase.randomBoolean()) {
map.put("ignore_malformed", ESTestCase.randomBoolean());
}
return map;
});
}
@Override
public DataSourceResponse.FieldDataGenerator handle(DataSourceRequest.FieldDataGenerator request) {
if (request.fieldType().equals("shape") == false) {
return null;
}
return new DataSourceResponse.FieldDataGenerator(new ShapeFieldDataGenerator(request.dataSource()));
}
private Geometry generateValidShape() {
while (true) {
var [MASK] = GeometryTestUtils.randomGeometryWithoutCircle(0, false);
if ([MASK].type() == ShapeType.ENVELOPE) {
continue;
}
try {
GeoTestUtils.binaryCartesianShapeDocValuesField("f", [MASK]);
return [MASK];
} catch (IllegalArgumentException ignored) {
}
}
}
} | geometry | java | elasticsearch |
package proguard.classfile.attribute.preverification;
import proguard.classfile.*;
import proguard.classfile.attribute.CodeAttribute;
import proguard.classfile.attribute.preverification.visitor.*;
public class FullFrame extends StackMapFrame
{
public int variablesCount;
public VerificationType[] variables;
public int stackCount;
public VerificationType[] stack;
public FullFrame()
{
}
public FullFrame(int [MASK],
VerificationType[] variables,
VerificationType[] stack)
{
this([MASK],
variables.length,
variables,
stack.length,
stack);
}
public FullFrame(int [MASK],
int variablesCount,
VerificationType[] variables,
int stackCount,
VerificationType[] stack)
{
this.u2offsetDelta = [MASK];
this.variablesCount = variablesCount;
this.variables = variables;
this.stackCount = stackCount;
this.stack = stack;
}
public void variablesAccept(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, VerificationTypeVisitor verificationTypeVisitor)
{
for (int index = 0; index < variablesCount; index++)
{
variables[index].variablesAccept(clazz, method, codeAttribute, offset, index, verificationTypeVisitor);
}
}
public void stackAccept(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, VerificationTypeVisitor verificationTypeVisitor)
{
for (int index = 0; index < stackCount; index++)
{
stack[index].stackAccept(clazz, method, codeAttribute, offset, index, verificationTypeVisitor);
}
}
public int getTag()
{
return FULL_FRAME;
}
public void accept(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, StackMapFrameVisitor stackMapFrameVisitor)
{
stackMapFrameVisitor.visitFullFrame(clazz, method, codeAttribute, offset, this);
}
public boolean equals(Object object)
{
if (!super.equals(object))
{
return false;
}
FullFrame other = (FullFrame)object;
if (this.u2offsetDelta != other.u2offsetDelta ||
this.variablesCount != other.variablesCount ||
this.stackCount != other.stackCount)
{
return false;
}
for (int index = 0; index < variablesCount; index++)
{
VerificationType thisType = this.variables[index];
VerificationType otherType = other.variables[index];
if (!thisType.equals(otherType))
{
return false;
}
}
for (int index = 0; index < stackCount; index++)
{
VerificationType thisType = this.stack[index];
VerificationType otherType = other.stack[index];
if (!thisType.equals(otherType))
{
return false;
}
}
return true;
}
public int hashCode()
{
int hashCode = super.hashCode();
for (int index = 0; index < variablesCount; index++)
{
hashCode ^= variables[index].hashCode();
}
for (int index = 0; index < stackCount; index++)
{
hashCode ^= stack[index].hashCode();
}
return hashCode;
}
public String toString()
{
StringBuffer buffer = new StringBuffer(super.toString()).append("Var: ");
for (int index = 0; index < variablesCount; index++)
{
buffer = buffer.append('[')
.append(variables[index].toString())
.append(']');
}
buffer.append(", Stack: ");
for (int index = 0; index < stackCount; index++)
{
buffer = buffer.append('[')
.append(stack[index].toString())
.append(']');
}
return buffer.toString();
}
} | offsetDelta | java | bazel |
package com.tencent.tinker.loader.hotplug;
import android.content.ComponentName;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.os.Build;
import android.os.Bundle;
import android.os.PatternMatcher;
import android.text.TextUtils;
import android.util.Log;
import android.util.Xml;
import com.tencent.tinker.loader.shareutil.SharePatchFileUtil;
import com.tencent.tinker.loader.shareutil.ShareReflectUtil;
import com.tencent.tinker.loader.shareutil.ShareSecurityCheck;
import com.tencent.tinker.loader.shareutil.ShareTinkerLog;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
public final class IncrementComponentManager {
private static final String TAG = "Tinker.IncrementCompMgr";
private static final int TAG_ACTIVITY = 0;
private static final int TAG_SERVICE = 1;
private static final int TAG_PROVIDER = 2;
private static final int TAG_RECEIVER = 3;
private static Context sContext = null;
private static String sPackageName = null;
private static volatile boolean [MASK] = false;
private static final Map<String, ActivityInfo> CLASS_NAME_TO_ACTIVITY_INFO_MAP = new HashMap<>();
private static final Map<String, IntentFilter> CLASS_NAME_TO_INTENT_FILTER_MAP = new HashMap<>();
private static abstract class AttrTranslator<T_RESULT> {
final void translate(Context context, int tagType, XmlPullParser parser, T_RESULT result) {
onInit(context, tagType, parser);
final int attrCount = parser.getAttributeCount();
for (int i = 0; i < attrCount; ++i) {
final String attrPrefix = parser.getAttributePrefix(i);
if (!"android".equals(attrPrefix)) {
continue;
}
final String attrName = parser.getAttributeName(i);
final String attrValue = parser.getAttributeValue(i);
onTranslate(context, tagType, attrName, attrValue, result);
}
}
void onInit(Context context, int tagType, XmlPullParser parser) {
}
abstract void onTranslate(Context context, int tagType, String attrName, String attrValue, T_RESULT result);
}
private static final AttrTranslator<ActivityInfo> ACTIVITY_INFO_ATTR_TRANSLATOR = new AttrTranslator<ActivityInfo>() {
@Override
void onInit(Context context, int tagType, XmlPullParser parser) {
try {
if (tagType == TAG_ACTIVITY
&& (parser.getEventType() != XmlPullParser.START_TAG
|| !"activity".equals(parser.getName()))) {
throw new IllegalStateException("unexpected xml parser state when parsing incremental component manifest.");
}
} catch (XmlPullParserException e) {
throw new IllegalStateException(e);
}
}
@Override
void onTranslate(Context context, int tagType, String attrName, String attrValue, ActivityInfo result) {
if ("name".equals(attrName)) {
if (attrValue.charAt(0) == '.') {
result.name = context.getPackageName() + attrValue;
} else {
result.name = attrValue;
}
} else if ("parentActivityName".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 16) {
if (attrValue.charAt(0) == '.') {
result.parentActivityName = context.getPackageName() + attrValue;
} else {
result.parentActivityName = attrValue;
}
}
} else if ("exported".equals(attrName)) {
result.exported = "true".equalsIgnoreCase(attrValue);
} else if ("launchMode".equals(attrName)) {
result.launchMode = parseLaunchMode(attrValue);
} else if ("theme".equals(attrName)) {
final Resources res = context.getResources();
final String packageName = context.getPackageName();
result.theme = res.getIdentifier(attrValue, "style", packageName);
} else if ("uiOptions".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 14) {
result.uiOptions = Integer.decode(attrValue);
}
} else if ("permission".equals(attrName)) {
result.permission = attrValue;
} else if ("taskAffinity".equals(attrName)) {
result.taskAffinity = attrValue;
} else if ("multiprocess".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_MULTIPROCESS;
} else {
result.flags &= ~ActivityInfo.FLAG_MULTIPROCESS;
}
} else if ("finishOnTaskLaunch".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_FINISH_ON_TASK_LAUNCH;
} else {
result.flags &= ~ActivityInfo.FLAG_FINISH_ON_TASK_LAUNCH;
}
} else if ("clearTaskOnLaunch".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_CLEAR_TASK_ON_LAUNCH;
} else {
result.flags &= ~ActivityInfo.FLAG_CLEAR_TASK_ON_LAUNCH;
}
} else if ("noHistory".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_NO_HISTORY;
} else {
result.flags &= ~ActivityInfo.FLAG_NO_HISTORY;
}
} else if ("alwaysRetainTaskState".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_ALWAYS_RETAIN_TASK_STATE;
} else {
result.flags &= ~ActivityInfo.FLAG_ALWAYS_RETAIN_TASK_STATE;
}
} else if ("stateNotNeeded".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_STATE_NOT_NEEDED;
} else {
result.flags &= ~ActivityInfo.FLAG_STATE_NOT_NEEDED;
}
} else if ("excludeFromRecents".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_EXCLUDE_FROM_RECENTS;
} else {
result.flags &= ~ActivityInfo.FLAG_EXCLUDE_FROM_RECENTS;
}
} else if ("allowTaskReparenting".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_ALLOW_TASK_REPARENTING;
} else {
result.flags &= ~ActivityInfo.FLAG_ALLOW_TASK_REPARENTING;
}
} else if ("finishOnCloseSystemDialogs".equals(attrName)) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_FINISH_ON_CLOSE_SYSTEM_DIALOGS;
} else {
result.flags &= ~ActivityInfo.FLAG_FINISH_ON_CLOSE_SYSTEM_DIALOGS;
}
} else if ("showOnLockScreen".equals(attrName) || "showForAllUsers".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 23) {
final int flag = ShareReflectUtil
.getValueOfStaticIntField(ActivityInfo.class, "FLAG_SHOW_FOR_ALL_USERS", 0);
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= flag;
} else {
result.flags &= ~flag;
}
}
} else if ("immersive".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 18) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_IMMERSIVE;
} else {
result.flags &= ~ActivityInfo.FLAG_IMMERSIVE;
}
}
} else if ("hardwareAccelerated".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 11) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_HARDWARE_ACCELERATED;
} else {
result.flags &= ~ActivityInfo.FLAG_HARDWARE_ACCELERATED;
}
}
} else if ("documentLaunchMode".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 21) {
result.documentLaunchMode = Integer.decode(attrValue);
}
} else if ("maxRecents".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 21) {
result.maxRecents = Integer.decode(attrValue);
}
} else if ("configChanges".equals(attrName)) {
result.configChanges = Integer.decode(attrValue);
} else if ("windowSoftInputMode".equals(attrName)) {
result.softInputMode = Integer.decode(attrValue);
} else if ("persistableMode".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 21) {
result.persistableMode = Integer.decode(attrValue);
}
} else if ("allowEmbedded".equals(attrName)) {
final int flag = ShareReflectUtil
.getValueOfStaticIntField(ActivityInfo.class, "FLAG_ALLOW_EMBEDDED", 0);
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= flag;
} else {
result.flags &= ~flag;
}
} else if ("autoRemoveFromRecents".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 21) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_AUTO_REMOVE_FROM_RECENTS;
} else {
result.flags &= ~ActivityInfo.FLAG_AUTO_REMOVE_FROM_RECENTS;
}
}
} else if ("relinquishTaskIdentity".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 21) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_RELINQUISH_TASK_IDENTITY;
} else {
result.flags &= ~ActivityInfo.FLAG_RELINQUISH_TASK_IDENTITY;
}
}
} else if ("resumeWhilePausing".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 21) {
if ("true".equalsIgnoreCase(attrValue)) {
result.flags |= ActivityInfo.FLAG_RESUME_WHILE_PAUSING;
} else {
result.flags &= ~ActivityInfo.FLAG_RESUME_WHILE_PAUSING;
}
}
} else if ("screenOrientation".equals(attrName)) {
result.screenOrientation = parseScreenOrientation(attrValue);
} else if ("label".equals(attrName)) {
final String strOrResId = attrValue;
int id = 0;
try {
id = context.getResources().getIdentifier(strOrResId, "string", sPackageName);
} catch (Throwable ignored) {
}
if (id != 0) {
result.labelRes = id;
} else {
result.nonLocalizedLabel = strOrResId;
}
} else if ("icon".equals(attrName)) {
try {
result.icon = context.getResources().getIdentifier(attrValue, null, sPackageName);
} catch (Throwable ignored) {
}
} else if ("banner".equals(attrName)) {
if (Build.VERSION.SDK_INT >= 20) {
try {
result.banner = context.getResources().getIdentifier(attrValue, null, sPackageName);
} catch (Throwable ignored) {
}
}
} else if ("logo".equals(attrName)) {
try {
result.logo = context.getResources().getIdentifier(attrValue, null, sPackageName);
} catch (Throwable ignored) {
}
}
}
private int parseLaunchMode(String attrValue) {
if ("standard".equalsIgnoreCase(attrValue)) {
return ActivityInfo.LAUNCH_MULTIPLE;
} else if ("singleTop".equalsIgnoreCase(attrValue)) {
return ActivityInfo.LAUNCH_SINGLE_TOP;
} else if ("singleTask".equalsIgnoreCase(attrValue)) {
return ActivityInfo.LAUNCH_SINGLE_TASK;
} else if ("singleInstance".equalsIgnoreCase(attrValue)) {
return ActivityInfo.LAUNCH_SINGLE_INSTANCE;
} else {
ShareTinkerLog.w(TAG, "Unknown launchMode: " + attrValue);
return ActivityInfo.LAUNCH_MULTIPLE;
}
}
private int parseScreenOrientation(String attrValue) {
if ("unspecified".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED;
} else if ("behind".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_BEHIND;
} else if ("landscape".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
} else if ("portrait".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
} else if ("reverseLandscape".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
} else if ("reversePortrait".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
} else if ("sensorLandscape".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE;
} else if ("sensorPortrait".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_SENSOR_PORTRAIT;
} else if ("sensor".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_SENSOR;
} else if ("fullSensor".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR;
} else if ("nosensor".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_NOSENSOR;
} else if ("user".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_USER;
} else if (Build.VERSION.SDK_INT >= 18 && "fullUser".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_FULL_USER;
} else if (Build.VERSION.SDK_INT >= 18 && "locked".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_LOCKED;
} else if (Build.VERSION.SDK_INT >= 18 && "userLandscape".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_USER_LANDSCAPE;
} else if (Build.VERSION.SDK_INT >= 18 && "userPortrait".equalsIgnoreCase(attrValue)) {
return ActivityInfo.SCREEN_ORIENTATION_USER_PORTRAIT;
} else {
return ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED;
}
}
};
public static synchronized boolean init(Context context, ShareSecurityCheck checker) throws IOException {
if (!checker.getMetaContentMap().containsKey(EnvConsts.INCCOMPONENT_META_FILE)) {
ShareTinkerLog.i(TAG, "package has no incremental component meta, skip init.");
return false;
}
while (context instanceof ContextWrapper) {
final Context baseCtx = ((ContextWrapper) context).getBaseContext();
if (baseCtx == null) {
break;
}
context = baseCtx;
}
sContext = context;
sPackageName = context.getPackageName();
final String xmlMeta = checker.getMetaContentMap().get(EnvConsts.INCCOMPONENT_META_FILE);
StringReader sr = new StringReader(xmlMeta);
XmlPullParser parser = null;
try {
parser = Xml.newPullParser();
parser.setInput(sr);
int event = parser.getEventType();
while (event != XmlPullParser.END_DOCUMENT) {
switch (event) {
case XmlPullParser.START_TAG:
final String tagName = parser.getName();
if ("activity".equalsIgnoreCase(tagName)) {
final ActivityInfo aInfo = parseActivity(context, parser);
CLASS_NAME_TO_ACTIVITY_INFO_MAP.put(aInfo.name, aInfo);
} else if ("service".equalsIgnoreCase(tagName)) {
} else if ("receiver".equalsIgnoreCase(tagName)) {
} else if ("provider".equalsIgnoreCase(tagName)) {
}
break;
default:
break;
}
event = parser.next();
}
[MASK] = true;
return true;
} catch (XmlPullParserException e) {
throw new IOException(e);
} finally {
if (parser != null) {
try {
parser.setInput(null);
} catch (Throwable ignored) {
}
}
SharePatchFileUtil.closeQuietly(sr);
}
}
@SuppressWarnings("unchecked")
private static synchronized ActivityInfo parseActivity(Context context, XmlPullParser parser)
throws XmlPullParserException, IOException {
final ActivityInfo aInfo = new ActivityInfo();
final ApplicationInfo appInfo = context.getApplicationInfo();
aInfo.applicationInfo = appInfo;
aInfo.packageName = sPackageName;
aInfo.processName = appInfo.processName;
aInfo.launchMode = ActivityInfo.LAUNCH_MULTIPLE;
aInfo.permission = appInfo.permission;
aInfo.screenOrientation = ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED;
aInfo.taskAffinity = appInfo.taskAffinity;
if (Build.VERSION.SDK_INT >= 11 && (appInfo.flags & ApplicationInfo.FLAG_HARDWARE_ACCELERATED) != 0) {
aInfo.flags |= ActivityInfo.FLAG_HARDWARE_ACCELERATED;
}
if (Build.VERSION.SDK_INT >= 21) {
aInfo.documentLaunchMode = ActivityInfo.DOCUMENT_LAUNCH_NONE;
}
if (Build.VERSION.SDK_INT >= 14) {
aInfo.uiOptions = appInfo.uiOptions;
}
ACTIVITY_INFO_ATTR_TRANSLATOR.translate(context, TAG_ACTIVITY, parser, aInfo);
final int outerDepth = parser.getDepth();
while (true) {
final int type = parser.next();
if (type == XmlPullParser.END_DOCUMENT
|| (type == XmlPullParser.END_TAG && parser.getDepth() <= outerDepth)) {
break;
} else if (type == XmlPullParser.END_TAG || type == XmlPullParser.TEXT) {
continue;
}
final String tagName = parser.getName();
if ("intent-filter".equalsIgnoreCase(tagName)) {
parseIntentFilter(context, aInfo.name, parser);
} else if ("meta-data".equalsIgnoreCase(tagName)) {
parseMetaData(context, aInfo, parser);
}
}
return aInfo;
}
private static synchronized void parseIntentFilter(Context context, String componentName, XmlPullParser parser)
throws XmlPullParserException, IOException {
final IntentFilter intentFilter = new IntentFilter();
final String priorityStr = parser.getAttributeValue(null, "priority");
if (!TextUtils.isEmpty(priorityStr)) {
intentFilter.setPriority(Integer.decode(priorityStr));
}
final String autoVerify = parser.getAttributeValue(null, "autoVerify");
if (!TextUtils.isEmpty(autoVerify)) {
try {
final Method setAutoVerifyMethod
= ShareReflectUtil.findMethod(IntentFilter.class, "setAutoVerify", boolean.class);
setAutoVerifyMethod.invoke(intentFilter, "true".equalsIgnoreCase(autoVerify));
} catch (Throwable ignored) {
}
}
final int outerDepth = parser.getDepth();
while (true) {
final int type = parser.next();
if (type == XmlPullParser.END_DOCUMENT
|| (type == XmlPullParser.END_TAG && parser.getDepth() <= outerDepth)) {
break;
} else if (type == XmlPullParser.END_TAG || type == XmlPullParser.TEXT) {
continue;
}
final String tagName = parser.getName();
if ("action".equals(tagName)) {
final String name = parser.getAttributeValue(null, "name");
if (name != null) {
intentFilter.addAction(name);
}
} else if ("category".equals(tagName)) {
final String name = parser.getAttributeValue(null, "name");
if (name != null) {
intentFilter.addCategory(name);
}
} else if ("data".equals(tagName)) {
final String mimeType = parser.getAttributeValue(null, "mimeType");
if (mimeType != null) {
try {
intentFilter.addDataType(mimeType);
} catch (IntentFilter.MalformedMimeTypeException e) {
throw new XmlPullParserException("bad mimeType", parser, e);
}
}
final String scheme = parser.getAttributeValue(null, "scheme");
if (scheme != null) {
intentFilter.addDataScheme(scheme);
}
if (Build.VERSION.SDK_INT >= 19) {
final String ssp = parser.getAttributeValue(null, "ssp");
if (ssp != null) {
intentFilter.addDataSchemeSpecificPart(ssp, PatternMatcher.PATTERN_LITERAL);
}
final String sspPrefix = parser.getAttributeValue(null, "sspPrefix");
if (sspPrefix != null) {
intentFilter.addDataSchemeSpecificPart(sspPrefix, PatternMatcher.PATTERN_PREFIX);
}
final String sspPattern = parser.getAttributeValue(null, "sspPattern");
if (sspPattern != null) {
intentFilter.addDataSchemeSpecificPart(sspPattern, PatternMatcher.PATTERN_SIMPLE_GLOB);
}
}
final String host = parser.getAttributeValue(null, "host");
final String port = parser.getAttributeValue(null, "port");
if (host != null) {
intentFilter.addDataAuthority(host, port);
}
final String path = parser.getAttributeValue(null, "path");
if (path != null) {
intentFilter.addDataPath(path, PatternMatcher.PATTERN_LITERAL);
}
final String pathPrefix = parser.getAttributeValue(null, "pathPrefix");
if (pathPrefix != null) {
intentFilter.addDataPath(pathPrefix, PatternMatcher.PATTERN_PREFIX);
}
final String pathPattern = parser.getAttributeValue(null, "pathPattern");
if (pathPattern != null) {
intentFilter.addDataPath(pathPattern, PatternMatcher.PATTERN_SIMPLE_GLOB);
}
}
skipCurrentTag(parser);
}
CLASS_NAME_TO_INTENT_FILTER_MAP.put(componentName, intentFilter);
}
private static synchronized void parseMetaData(Context context, ActivityInfo aInfo, XmlPullParser parser)
throws XmlPullParserException, IOException {
final ClassLoader myCl = IncrementComponentManager.class.getClassLoader();
final String name = parser.getAttributeValue(null, "name");
final String value = parser.getAttributeValue(null, "value");
if (!TextUtils.isEmpty(name)) {
if (aInfo.metaData == null) {
aInfo.metaData = new Bundle(myCl);
}
aInfo.metaData.putString(name, value);
}
}
private static void skipCurrentTag(XmlPullParser parser) throws IOException, XmlPullParserException {
int outerDepth = parser.getDepth();
int type;
while ((type = parser.next()) != XmlPullParser.END_DOCUMENT
&& (type != XmlPullParser.END_TAG
|| parser.getDepth() > outerDepth)) {
}
}
private static synchronized void ensureInitialized() {
if (![MASK]) {
throw new IllegalStateException("Not initialized!!");
}
}
public static boolean isIncrementActivity(String className) {
ensureInitialized();
return className != null && CLASS_NAME_TO_ACTIVITY_INFO_MAP.containsKey(className);
}
public static ActivityInfo queryActivityInfo(String className) {
ensureInitialized();
return (className != null ? CLASS_NAME_TO_ACTIVITY_INFO_MAP.get(className) : null);
}
public static ResolveInfo resolveIntent(Intent intent) {
ensureInitialized();
int maxPriority = -1;
String bestComponentName = null;
IntentFilter respFilter = null;
int bestMatchRes = 0;
final ComponentName component = intent.getComponent();
if (component != null) {
final String compName = component.getClassName();
if (CLASS_NAME_TO_ACTIVITY_INFO_MAP.containsKey(compName)) {
bestComponentName = compName;
maxPriority = 0;
}
} else {
for (Map.Entry<String, IntentFilter> item : CLASS_NAME_TO_INTENT_FILTER_MAP.entrySet()) {
final String componentName = item.getKey();
final IntentFilter intentFilter = item.getValue();
final int matchRes = intentFilter.match(intent.getAction(), intent.getType(),
intent.getScheme(), intent.getData(), intent.getCategories(), TAG);
final boolean matches = (matchRes != IntentFilter.NO_MATCH_ACTION)
&& (matchRes != IntentFilter.NO_MATCH_CATEGORY)
&& (matchRes != IntentFilter.NO_MATCH_DATA)
&& (matchRes != IntentFilter.NO_MATCH_TYPE);
final int priority = intentFilter.getPriority();
if (matches && priority > maxPriority) {
maxPriority = priority;
bestComponentName = componentName;
respFilter = intentFilter;
bestMatchRes = matchRes;
}
}
}
if (bestComponentName != null) {
final ResolveInfo result = new ResolveInfo();
result.activityInfo = CLASS_NAME_TO_ACTIVITY_INFO_MAP.get(bestComponentName);
result.filter = respFilter;
result.match = bestMatchRes;
result.priority = maxPriority;
result.resolvePackageName = sPackageName;
result.icon = result.activityInfo.icon;
result.labelRes = result.activityInfo.labelRes;
return result;
} else {
return null;
}
}
private IncrementComponentManager() {
throw new UnsupportedOperationException();
}
} | sInitialized | java | tinker |
package org.springframework.boot.context.properties.bind;
import java.lang.reflect.Constructor;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import org.apache.tomcat.jdbc.pool.PoolProperties;
import org.junit.jupiter.api.Test;
import org.springframework.aot.hint.ExecutableHint;
import org.springframework.aot.hint.RuntimeHints;
import org.springframework.aot.hint.TypeHint;
import org.springframework.aot.hint.TypeReference;
import org.springframework.aot.hint.predicate.RuntimeHintsPredicates;
import org.springframework.boot.context.properties.BoundConfigurationProperties;
import org.springframework.boot.context.properties.ConfigurationPropertiesBean;
import org.springframework.boot.context.properties.NestedConfigurationProperty;
import org.springframework.boot.context.properties.bind.BindableRuntimeHintsRegistrarTests.BaseProperties.InheritedNested;
import org.springframework.boot.context.properties.bind.BindableRuntimeHintsRegistrarTests.ComplexNestedProperties.ListenerRetry;
import org.springframework.boot.context.properties.bind.BindableRuntimeHintsRegistrarTests.ComplexNestedProperties.Retry;
import org.springframework.boot.context.properties.bind.BindableRuntimeHintsRegistrarTests.ComplexNestedProperties.Simple;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.EnvironmentAware;
import org.springframework.core.StandardReflectionParameterNameDiscoverer;
import org.springframework.core.env.Environment;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatNoException;
class BindableRuntimeHintsRegistrarTests {
@Test
void registerHints() {
RuntimeHints runtimeHints = new RuntimeHints();
Class<?>[] types = { BoundConfigurationProperties.class, ConfigurationPropertiesBean.class };
BindableRuntimeHintsRegistrar registrar = new BindableRuntimeHintsRegistrar(types);
registrar.registerHints(runtimeHints);
for (Class<?> type : types) {
assertThat(RuntimeHintsPredicates.reflection().onType(type)).accepts(runtimeHints);
}
}
@Test
void registerHintsWithIterable() {
RuntimeHints runtimeHints = new RuntimeHints();
List<Class<?>> types = Arrays.asList(BoundConfigurationProperties.class, ConfigurationPropertiesBean.class);
BindableRuntimeHintsRegistrar registrar = BindableRuntimeHintsRegistrar.forTypes(types);
registrar.registerHints(runtimeHints);
for (Class<?> type : types) {
assertThat(RuntimeHintsPredicates.reflection().onType(type)).accepts(runtimeHints);
}
}
@Test
void registerHintsWhenNoClasses() {
RuntimeHints runtimeHints = new RuntimeHints();
BindableRuntimeHintsRegistrar registrar = new BindableRuntimeHintsRegistrar(new Class<?>[0]);
registrar.registerHints(runtimeHints);
assertThat(runtimeHints.reflection().typeHints()).isEmpty();
}
@Test
void registerHintsViaForType() {
RuntimeHints runtimeHints = new RuntimeHints();
Class<?>[] types = { BoundConfigurationProperties.class, ConfigurationPropertiesBean.class };
BindableRuntimeHintsRegistrar registrar = BindableRuntimeHintsRegistrar.forTypes(types);
registrar.registerHints(runtimeHints);
for (Class<?> type : types) {
assertThat(RuntimeHintsPredicates.reflection().onType(type)).accepts(runtimeHints);
}
}
@Test
void registerHintsWhenJavaBean() {
RuntimeHints runtimeHints = registerHints(JavaBean.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement().satisfies(javaBeanBinding(JavaBean.class));
}
@Test
void registerHintsWhenJavaBeanWithSeveralConstructors() throws NoSuchMethodException {
RuntimeHints runtimeHints = registerHints(WithSeveralConstructors.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement()
.satisfies(javaBeanBinding(WithSeveralConstructors.class,
WithSeveralConstructors.class.getDeclaredConstructor()));
}
@Test
void registerHintsWhenJavaBeanWithMapOfPojo() {
RuntimeHints runtimeHints = registerHints(WithMap.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(2)
.anySatisfy(javaBeanBinding(WithMap.class, "getAddresses"))
.anySatisfy(javaBeanBinding(Address.class));
}
@Test
void registerHintsWhenJavaBeanWithListOfPojo() {
RuntimeHints runtimeHints = registerHints(WithList.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(2)
.anySatisfy(javaBeanBinding(WithList.class, "getAllAddresses"))
.anySatisfy(javaBeanBinding(Address.class));
}
@Test
void registerHintsWhenJavaBeanWitArrayOfPojo() {
RuntimeHints runtimeHints = registerHints(WithArray.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(2)
.anySatisfy(javaBeanBinding(WithArray.class, "getAllAddresses"))
.anySatisfy(javaBeanBinding(Address.class));
}
@Test
void registerHintsWhenJavaBeanWithListOfJavaType() {
RuntimeHints runtimeHints = registerHints(WithSimpleList.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement()
.satisfies(javaBeanBinding(WithSimpleList.class, "getNames"));
}
@Test
void registerHintsWhenValueObject() {
RuntimeHints runtimeHints = registerHints(Immutable.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement()
.satisfies(valueObjectBinding(Immutable.class));
}
@Test
void registerHintsWhenValueObjectWithSpecificConstructor() throws NoSuchMethodException {
RuntimeHints runtimeHints = registerHints(ImmutableWithSeveralConstructors.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement()
.satisfies(valueObjectBinding(ImmutableWithSeveralConstructors.class,
ImmutableWithSeveralConstructors.class.getDeclaredConstructor(String.class)));
}
@Test
void registerHintsWhenValueObjectWithSeveralLayersOfPojo() {
RuntimeHints runtimeHints = registerHints(ImmutableWithList.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(3)
.anySatisfy(valueObjectBinding(ImmutableWithList.class))
.anySatisfy(valueObjectBinding(Person.class))
.anySatisfy(valueObjectBinding(Address.class));
}
@Test
void registerHintsWhenHasNestedTypeNotUsedIsIgnored() {
RuntimeHints runtimeHints = registerHints(WithNested.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement().satisfies(javaBeanBinding(WithNested.class));
}
@Test
void registerHintsWhenWhenHasNestedExternalType() {
RuntimeHints runtimeHints = registerHints(WithExternalNested.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(3)
.anySatisfy(
javaBeanBinding(WithExternalNested.class, "getName", "setName", "getSampleType", "setSampleType"))
.anySatisfy(javaBeanBinding(SampleType.class, "getNested"))
.anySatisfy(javaBeanBinding(SampleType.Nested.class));
}
@Test
void registerHintsWhenHasRecursiveType() {
RuntimeHints runtimeHints = registerHints(WithRecursive.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(2)
.anySatisfy(javaBeanBinding(WithRecursive.class, "getRecursive", "setRecursive"))
.anySatisfy(javaBeanBinding(Recursive.class, "getRecursive", "setRecursive"));
}
@Test
void registerHintsWhenValueObjectWithRecursiveType() {
RuntimeHints runtimeHints = registerHints(ImmutableWithRecursive.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(2)
.anySatisfy(valueObjectBinding(ImmutableWithRecursive.class))
.anySatisfy(valueObjectBinding(ImmutableRecursive.class));
}
@Test
void registerHintsWhenHasWellKnownTypes() {
RuntimeHints runtimeHints = registerHints(WithWellKnownTypes.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement()
.satisfies(javaBeanBinding(WithWellKnownTypes.class, "getApplicationContext", "setApplicationContext",
"getEnvironment", "setEnvironment"));
}
@Test
void registerHintsWhenHasCrossReference() {
RuntimeHints runtimeHints = registerHints(WithCrossReference.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(3)
.anySatisfy(javaBeanBinding(WithCrossReference.class, "getCrossReferenceA", "setCrossReferenceA"))
.anySatisfy(javaBeanBinding(CrossReferenceA.class, "getCrossReferenceB", "setCrossReferenceB"))
.anySatisfy(javaBeanBinding(CrossReferenceB.class, "getCrossReferenceA", "setCrossReferenceA"));
}
@Test
void registerHintsWhenHasUnresolvedGeneric() {
RuntimeHints runtimeHints = registerHints(WithGeneric.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(2)
.anySatisfy(javaBeanBinding(WithGeneric.class, "getGeneric"))
.anySatisfy(javaBeanBinding(GenericObject.class));
}
@Test
void registerHintsWhenHasNestedGenerics() {
RuntimeHints runtimeHints = registerHints(NestedGenerics.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(2);
assertThat(RuntimeHintsPredicates.reflection().onType(NestedGenerics.class)).accepts(runtimeHints);
assertThat(RuntimeHintsPredicates.reflection().onType(NestedGenerics.Nested.class)).accepts(runtimeHints);
}
@Test
void registerHintsWhenHasMultipleNestedClasses() {
RuntimeHints runtimeHints = registerHints(TripleNested.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(3);
assertThat(RuntimeHintsPredicates.reflection().onType(TripleNested.class)).accepts(runtimeHints);
assertThat(RuntimeHintsPredicates.reflection().onType(TripleNested.DoubleNested.class)).accepts(runtimeHints);
assertThat(RuntimeHintsPredicates.reflection().onType(TripleNested.DoubleNested.Nested.class))
.accepts(runtimeHints);
}
@Test
void registerHintsWhenHasPackagePrivateGettersAndSetters() {
RuntimeHints runtimeHints = registerHints(PackagePrivateGettersAndSetters.class);
assertThat(runtimeHints.reflection().typeHints()).singleElement()
.satisfies(javaBeanBinding(PackagePrivateGettersAndSetters.class, "getAlpha", "setAlpha", "getBravo",
"setBravo"));
}
@Test
void registerHintsWhenHasInheritedNestedProperties() {
RuntimeHints runtimeHints = registerHints(ExtendingProperties.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(3);
assertThat(runtimeHints.reflection().getTypeHint(BaseProperties.class)).satisfies((entry) -> {
assertThat(entry.getMemberCategories()).isEmpty();
assertThat(entry.methods()).extracting(ExecutableHint::getName)
.containsExactlyInAnyOrder("getInheritedNested", "setInheritedNested");
});
assertThat(runtimeHints.reflection().getTypeHint(ExtendingProperties.class))
.satisfies(javaBeanBinding(ExtendingProperties.class, "getBravo", "setBravo"));
assertThat(runtimeHints.reflection().getTypeHint(InheritedNested.class))
.satisfies(javaBeanBinding(InheritedNested.class, "getAlpha", "setAlpha"));
}
@Test
void registerHintsWhenHasComplexNestedProperties() {
RuntimeHints runtimeHints = registerHints(ComplexNestedProperties.class);
assertThat(runtimeHints.reflection().typeHints()).hasSize(4);
assertThat(runtimeHints.reflection().getTypeHint(Retry.class)).satisfies((entry) -> {
assertThat(entry.getMemberCategories()).isEmpty();
assertThat(entry.methods()).extracting(ExecutableHint::getName)
.containsExactlyInAnyOrder("getCount", "setCount");
});
assertThat(runtimeHints.reflection().getTypeHint(ListenerRetry.class))
.satisfies(javaBeanBinding(ListenerRetry.class, "isStateless", "setStateless"));
assertThat(runtimeHints.reflection().getTypeHint(Simple.class))
.satisfies(javaBeanBinding(Simple.class, "getRetry"));
assertThat(runtimeHints.reflection().getTypeHint(ComplexNestedProperties.class))
.satisfies(javaBeanBinding(ComplexNestedProperties.class, "getSimple"));
}
@Test
void registerHintsDoesNotThrowWhenParameterInformationForConstructorBindingIsNotAvailable()
throws NoSuchMethodException, SecurityException {
Constructor<?> constructor = PoolProperties.InterceptorProperty.class.getConstructor(String.class,
String.class);
String[] parameterNames = new StandardReflectionParameterNameDiscoverer().getParameterNames(constructor);
assertThat(parameterNames).isNull();
assertThatNoException().isThrownBy(() -> registerHints(PoolProperties.class));
}
private Consumer<TypeHint> javaBeanBinding(Class<?> type, String... [MASK]) {
return javaBeanBinding(type, type.getDeclaredConstructors()[0], [MASK]);
}
private Consumer<TypeHint> javaBeanBinding(Class<?> type, Constructor<?> constructor, String... [MASK]) {
return (entry) -> {
assertThat(entry.getType()).isEqualTo(TypeReference.of(type));
assertThat(entry.constructors()).singleElement().satisfies(match(constructor));
assertThat(entry.getMemberCategories()).isEmpty();
assertThat(entry.methods()).extracting(ExecutableHint::getName).containsExactlyInAnyOrder([MASK]);
};
}
private Consumer<TypeHint> valueObjectBinding(Class<?> type) {
return valueObjectBinding(type, type.getDeclaredConstructors()[0]);
}
private Consumer<TypeHint> valueObjectBinding(Class<?> type, Constructor<?> constructor) {
return (entry) -> {
assertThat(entry.getType()).isEqualTo(TypeReference.of(type));
assertThat(entry.constructors()).singleElement().satisfies(match(constructor));
assertThat(entry.getMemberCategories()).isEmpty();
assertThat(entry.methods()).isEmpty();
};
}
private Consumer<ExecutableHint> match(Constructor<?> constructor) {
return (executableHint) -> {
assertThat(executableHint.getName()).isEqualTo("<init>");
assertThat(Arrays.stream(constructor.getParameterTypes()).map(TypeReference::of).toList())
.isEqualTo(executableHint.getParameterTypes());
};
}
private RuntimeHints registerHints(Class<?>... types) {
RuntimeHints hints = new RuntimeHints();
BindableRuntimeHintsRegistrar.forTypes(types).registerHints(hints);
return hints;
}
public static class JavaBean {
}
public static class WithSeveralConstructors {
WithSeveralConstructors() {
}
WithSeveralConstructors(String ignored) {
}
}
public static class WithMap {
public Map<String, Address> getAddresses() {
return Collections.emptyMap();
}
}
public static class WithList {
public List<Address> getAllAddresses() {
return Collections.emptyList();
}
}
public static class WithSimpleList {
public List<String> getNames() {
return Collections.emptyList();
}
}
public static class WithArray {
public Address[] getAllAddresses() {
return new Address[0];
}
}
public static class Immutable {
@SuppressWarnings("unused")
private final String name;
Immutable(String name) {
this.name = name;
}
}
public static class ImmutableWithSeveralConstructors {
@SuppressWarnings("unused")
private final String name;
@ConstructorBinding
ImmutableWithSeveralConstructors(String name) {
this.name = name;
}
ImmutableWithSeveralConstructors() {
this("test");
}
}
public static class ImmutableWithList {
@SuppressWarnings("unused")
private final List<Person> family;
ImmutableWithList(List<Person> family) {
this.family = family;
}
}
public static class WithNested {
static class OneLevelDown {
}
}
public static class WithExternalNested {
private String name;
@NestedConfigurationProperty
private SampleType sampleType;
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public SampleType getSampleType() {
return this.sampleType;
}
public void setSampleType(SampleType sampleType) {
this.sampleType = sampleType;
}
}
public static class WithRecursive {
@NestedConfigurationProperty
private Recursive recursive;
public Recursive getRecursive() {
return this.recursive;
}
public void setRecursive(Recursive recursive) {
this.recursive = recursive;
}
}
public static class ImmutableWithRecursive {
@NestedConfigurationProperty
private final ImmutableRecursive recursive;
ImmutableWithRecursive(ImmutableRecursive recursive) {
this.recursive = recursive;
}
}
public static class WithWellKnownTypes implements ApplicationContextAware, EnvironmentAware {
private ApplicationContext applicationContext;
private Environment environment;
public ApplicationContext getApplicationContext() {
return this.applicationContext;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
public Environment getEnvironment() {
return this.environment;
}
@Override
public void setEnvironment(Environment environment) {
this.environment = environment;
}
}
public static class SampleType {
private final Nested nested = new Nested();
public Nested getNested() {
return this.nested;
}
static class Nested {
}
}
public static class PackagePrivateGettersAndSetters {
private String alpha;
private Map<String, String> bravo;
String getAlpha() {
return this.alpha;
}
void setAlpha(String alpha) {
this.alpha = alpha;
}
Map<String, String> getBravo() {
return this.bravo;
}
void setBravo(Map<String, String> bravo) {
this.bravo = bravo;
}
}
public static class Address {
}
public static class Person {
@SuppressWarnings("unused")
private final String firstName;
@SuppressWarnings("unused")
private final String lastName;
@NestedConfigurationProperty
private final Address address;
Person(String firstName, String lastName, Address address) {
this.firstName = firstName;
this.lastName = lastName;
this.address = address;
}
}
public static class Recursive {
private Recursive recursive;
public Recursive getRecursive() {
return this.recursive;
}
public void setRecursive(Recursive recursive) {
this.recursive = recursive;
}
}
public static class ImmutableRecursive {
@SuppressWarnings("unused")
private final ImmutableRecursive recursive;
ImmutableRecursive(ImmutableRecursive recursive) {
this.recursive = recursive;
}
}
public static class WithCrossReference {
@NestedConfigurationProperty
private CrossReferenceA crossReferenceA;
public void setCrossReferenceA(CrossReferenceA crossReferenceA) {
this.crossReferenceA = crossReferenceA;
}
public CrossReferenceA getCrossReferenceA() {
return this.crossReferenceA;
}
}
public static class CrossReferenceA {
@NestedConfigurationProperty
private CrossReferenceB crossReferenceB;
public void setCrossReferenceB(CrossReferenceB crossReferenceB) {
this.crossReferenceB = crossReferenceB;
}
public CrossReferenceB getCrossReferenceB() {
return this.crossReferenceB;
}
}
public static class CrossReferenceB {
private CrossReferenceA crossReferenceA;
public void setCrossReferenceA(CrossReferenceA crossReferenceA) {
this.crossReferenceA = crossReferenceA;
}
public CrossReferenceA getCrossReferenceA() {
return this.crossReferenceA;
}
}
public static class WithGeneric {
@NestedConfigurationProperty
private GenericObject<?> generic;
public GenericObject<?> getGeneric() {
return this.generic;
}
}
public static final class GenericObject<T> {
private final T value;
GenericObject(T value) {
this.value = value;
}
public T getValue() {
return this.value;
}
}
public static class NestedGenerics {
private final Map<String, List<Nested>> nested = new HashMap<>();
public Map<String, List<Nested>> getNested() {
return this.nested;
}
public static class Nested {
private String field;
public String getField() {
return this.field;
}
public void setField(String field) {
this.field = field;
}
}
}
public static class TripleNested {
private final DoubleNested doubleNested = new DoubleNested();
public DoubleNested getDoubleNested() {
return this.doubleNested;
}
public static class DoubleNested {
private final Nested nested = new Nested();
public Nested getNested() {
return this.nested;
}
public static class Nested {
private String field;
public String getField() {
return this.field;
}
public void setField(String field) {
this.field = field;
}
}
}
}
public abstract static class BaseProperties {
private InheritedNested inheritedNested;
public InheritedNested getInheritedNested() {
return this.inheritedNested;
}
public void setInheritedNested(InheritedNested inheritedNested) {
this.inheritedNested = inheritedNested;
}
public static class InheritedNested {
private String alpha;
public String getAlpha() {
return this.alpha;
}
public void setAlpha(String alpha) {
this.alpha = alpha;
}
}
}
public static class ExtendingProperties extends BaseProperties {
private String bravo;
public String getBravo() {
return this.bravo;
}
public void setBravo(String bravo) {
this.bravo = bravo;
}
}
public static class ComplexNestedProperties {
private final Simple simple = new Simple();
public Simple getSimple() {
return this.simple;
}
public static class Simple {
private final ListenerRetry retry = new ListenerRetry();
public ListenerRetry getRetry() {
return this.retry;
}
}
public abstract static class Retry {
private int count = 5;
public int getCount() {
return this.count;
}
public void setCount(int count) {
this.count = count;
}
}
public static class ListenerRetry extends Retry {
private boolean stateless;
public boolean isStateless() {
return this.stateless;
}
public void setStateless(boolean stateless) {
this.stateless = stateless;
}
}
}
} | expectedMethods | java | spring-boot |
package jenkins.bugs;
import hudson.model.FreeStyleProject;
import org.htmlunit.cssparser.parser.CSSErrorHandler;
import org.htmlunit.cssparser.parser.CSSException;
import org.htmlunit.cssparser.parser.CSSParseException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ErrorCollector;
import org.jvnet.hudson.test.JenkinsRule;
public class Jenkins14749Test {
@Rule
public JenkinsRule j = new JenkinsRule();
@Rule
public ErrorCollector errors = new ErrorCollector();
@Test
public void dashboard() throws Exception {
JenkinsRule.WebClient webClient = createErrorReportingWebClient();
webClient.goTo("");
}
@Test
public void project() throws Exception {
FreeStyleProject p = j.createFreeStyleProject();
JenkinsRule.WebClient webClient = createErrorReportingWebClient();
webClient.getPage(p);
}
@Test
public void configureProject() throws Exception {
FreeStyleProject p = j.createFreeStyleProject();
JenkinsRule.WebClient webClient = createErrorReportingWebClient();
webClient.getPage(p, "configure");
}
@Test
public void manage() throws Exception {
JenkinsRule.WebClient webClient = createErrorReportingWebClient();
webClient.goTo("manage");
}
@Test
public void system() throws Exception {
JenkinsRule.WebClient webClient = createErrorReportingWebClient();
webClient.goTo("manage/configure");
}
private JenkinsRule.WebClient createErrorReportingWebClient() {
JenkinsRule.WebClient webClient = j.createWebClient();
webClient.setCssErrorHandler(new CSSErrorHandler() {
@Override
public void warning(final CSSParseException [MASK]) throws CSSException {
errors.addError([MASK]);
}
@Override
public void error(final CSSParseException [MASK]) throws CSSException {
errors.addError([MASK]);
}
@Override
public void fatalError(final CSSParseException [MASK]) throws CSSException {
errors.addError([MASK]);
}
});
return webClient;
}
} | exception | java | jenkins |
package org.apache.ibatis.scripting.defaults;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.apache.ibatis.builder.StaticSqlSource;
import org.apache.ibatis.domain.blog.Author;
import org.apache.ibatis.domain.blog.Section;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.ParameterMapping;
import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.mapping.SqlCommandType;
import org.apache.ibatis.reflection.DefaultReflectorFactory;
import org.apache.ibatis.reflection.MetaObject;
import org.apache.ibatis.reflection.ReflectorFactory;
import org.apache.ibatis.reflection.factory.DefaultObjectFactory;
import org.apache.ibatis.reflection.factory.ObjectFactory;
import org.apache.ibatis.reflection.wrapper.DefaultObjectWrapperFactory;
import org.apache.ibatis.reflection.wrapper.ObjectWrapperFactory;
import org.apache.ibatis.scripting.xmltags.XMLLanguageDriver;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.TypeException;
import org.apache.ibatis.type.TypeHandler;
import org.apache.ibatis.type.TypeHandlerRegistry;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
class DefaultParameterHandlerTest {
@Test
void setParametersThrowsProperException() throws SQLException {
final MappedStatement mappedStatement = getMappedStatement();
final Object parameterObject = null;
final BoundSql boundSql = mock(BoundSql.class);
TypeHandler<Object> typeHandler = mock(TypeHandler.class);
doThrow(new SQLException("foo")).when(typeHandler).setParameter(any(PreparedStatement.class), anyInt(), any(),
any(JdbcType.class));
ParameterMapping parameterMapping = new ParameterMapping.Builder(mappedStatement.getConfiguration(), "prop",
typeHandler).build();
List<ParameterMapping> parameterMappings = List.of(parameterMapping);
when(boundSql.getParameterMappings()).thenReturn(parameterMappings);
DefaultParameterHandler defaultParameterHandler = new DefaultParameterHandler(mappedStatement, parameterObject,
boundSql);
PreparedStatement ps = mock(PreparedStatement.class);
when(ps.getParameterMetaData()).thenReturn(mock(ParameterMetaData.class));
try {
defaultParameterHandler.setParameters(ps);
Assertions.fail("Should have thrown TypeException");
} catch (Exception e) {
Assertions.assertTrue(e instanceof TypeException, "expected TypeException");
Assertions.assertTrue(e.getMessage().contains("mapping: ParameterMapping"));
}
}
MappedStatement getMappedStatement() {
final Configuration config = new Configuration();
final TypeHandlerRegistry registry = config.getTypeHandlerRegistry();
return new MappedStatement.Builder(config, "testSelect", new StaticSqlSource(config, "some select statement"),
SqlCommandType.SELECT).resultMaps(new ArrayList<ResultMap>() {
private static final long serialVersionUID = 1L;
{
add(new ResultMap.Builder(config, "testMap", HashMap.class, new ArrayList<ResultMapping>() {
private static final long serialVersionUID = 1L;
{
add(new ResultMapping.Builder(config, "cOlUmN1", "CoLuMn1", registry.getTypeHandler(Integer.class))
.build());
}
}).build());
}
}).build();
}
@Test
void parameterObjectGetPropertyValueWithAdditionalParameter() throws SQLException {
Configuration config = new Configuration();
TypeHandlerRegistry registry = config.getTypeHandlerRegistry();
MappedStatement mappedStatement = new MappedStatement.Builder(config, "testSelect",
new StaticSqlSource(config, "some select statement"), SqlCommandType.SELECT).build();
Object parameterObject = 1;
BoundSql boundSql = new BoundSql(config, "some select statement", new ArrayList<ParameterMapping>() {
private static final long serialVersionUID = 1L;
{
add(new ParameterMapping.Builder(config, "id", registry.getTypeHandler(int.class)).build());
}
}, parameterObject) {
{
setAdditionalParameter("id", 2);
}
};
DefaultParameterHandler defaultParameterHandler = new DefaultParameterHandler(mappedStatement, parameterObject,
boundSql);
PreparedStatement ps = mock(PreparedStatement.class);
ParameterMetaData pmd = mock(ParameterMetaData.class);
when(pmd.getParameterType(1)).thenReturn(Types.INTEGER);
when(ps.getParameterMetaData()).thenReturn(pmd);
defaultParameterHandler.setParameters(ps);
verify(ps).setInt(1, 2);
}
@Test
void parameterObjectGetPropertyValueWithNull() throws SQLException {
Configuration config = new Configuration();
TypeHandlerRegistry registry = config.getTypeHandlerRegistry();
MappedStatement mappedStatement = new MappedStatement.Builder(config, "testSelect",
new StaticSqlSource(config, "some select statement"), SqlCommandType.SELECT).build();
Object parameterObject = null;
BoundSql boundSql = new BoundSql(config, "some select statement", new ArrayList<ParameterMapping>() {
private static final long serialVersionUID = 1L;
{
add(new ParameterMapping.Builder(config, "id", registry.getTypeHandler(int.class)).build());
}
}, parameterObject);
DefaultParameterHandler defaultParameterHandler = new DefaultParameterHandler(mappedStatement, parameterObject,
boundSql);
PreparedStatement ps = mock(PreparedStatement.class);
ParameterMetaData pmd = mock(ParameterMetaData.class);
when(pmd.getParameterType(1)).thenReturn(Types.INTEGER);
when(ps.getParameterMetaData()).thenReturn(pmd);
defaultParameterHandler.setParameters(ps);
verify(ps).setNull(1, config.getJdbcTypeForNull().TYPE_CODE);
}
@Test
void parameterObjectGetPropertyValueWithTypeHandler() throws SQLException {
Configuration config = new Configuration();
TypeHandlerRegistry registry = config.getTypeHandlerRegistry();
MappedStatement mappedStatement = new MappedStatement.Builder(config, "testSelect",
new StaticSqlSource(config, "some select statement"), SqlCommandType.SELECT).build();
Object parameterObject = 1;
BoundSql boundSql = new BoundSql(config, "some select statement", new ArrayList<ParameterMapping>() {
private static final long serialVersionUID = 1L;
{
add(new ParameterMapping.Builder(config, "id", registry.getTypeHandler(int.class)).build());
}
}, parameterObject);
DefaultParameterHandler defaultParameterHandler = new DefaultParameterHandler(mappedStatement, parameterObject,
boundSql);
PreparedStatement ps = mock(PreparedStatement.class);
ParameterMetaData pmd = mock(ParameterMetaData.class);
when(pmd.getParameterType(1)).thenReturn(Types.INTEGER);
when(ps.getParameterMetaData()).thenReturn(pmd);
defaultParameterHandler.setParameters(ps);
verify(ps).setInt(1, (Integer) parameterObject);
}
@Test
void parameterObjectGetPropertyValueWithMetaObject() throws SQLException {
Configuration config = new Configuration();
TypeHandlerRegistry registry = config.getTypeHandlerRegistry();
MappedStatement mappedStatement = new MappedStatement.Builder(config, "testSelect",
new StaticSqlSource(config, "some select statement"), SqlCommandType.SELECT).build();
Author parameterObject = new Author(-1, "cbegin", "******", "[email protected]", "N/A", Section.NEWS);
BoundSql boundSql = new BoundSql(config, "some select statement", new ArrayList<ParameterMapping>() {
private static final long serialVersionUID = 1L;
{
add(new ParameterMapping.Builder(config, "id", registry.getTypeHandler(int.class)).build());
add(new ParameterMapping.Builder(config, "username", registry.getTypeHandler(String.class)).build());
add(new ParameterMapping.Builder(config, "password", registry.getTypeHandler(String.class)).build());
add(new ParameterMapping.Builder(config, "email", registry.getTypeHandler(String.class)).build());
add(new ParameterMapping.Builder(config, "bio", registry.getTypeHandler(String.class))
.jdbcType(JdbcType.VARCHAR).build());
add(new ParameterMapping.Builder(config, "favouriteSection", registry.getTypeHandler(Section.class))
.jdbcType(JdbcType.VARCHAR).build());
}
}, parameterObject);
DefaultParameterHandler defaultParameterHandler = new DefaultParameterHandler(mappedStatement, parameterObject,
boundSql);
PreparedStatement ps = mock(PreparedStatement.class);
ParameterMetaData pmd = mock(ParameterMetaData.class);
when(pmd.getParameterType(1)).thenReturn(Types.INTEGER);
when(ps.getParameterMetaData()).thenReturn(pmd);
defaultParameterHandler.setParameters(ps);
verify(ps).setInt(1, parameterObject.getId());
verify(ps).setString(2, parameterObject.getUsername());
verify(ps).setString(3, parameterObject.getPassword());
verify(ps).setString(4, parameterObject.getEmail());
verify(ps).setString(5, parameterObject.getBio());
verify(ps).setObject(6, parameterObject.getFavouriteSection().name(), JdbcType.VARCHAR.TYPE_CODE);
}
@Test
void parameterObjectGetPropertyValueWithMetaObjectAndCreateOnce() {
Author parameterObject = mock(Author.class);
Configuration [MASK] = mock(Configuration.class);
final ObjectFactory objectFactory = new DefaultObjectFactory();
final ObjectWrapperFactory objectWrapperFactory = new DefaultObjectWrapperFactory();
final ReflectorFactory reflectorFactory = new DefaultReflectorFactory();
when([MASK].getTypeHandlerRegistry()).thenReturn(new TypeHandlerRegistry([MASK]));
when([MASK].getDefaultScriptingLanguageInstance()).thenReturn(new XMLLanguageDriver());
when([MASK].newMetaObject(parameterObject))
.thenReturn(MetaObject.forObject(parameterObject, objectFactory, objectWrapperFactory, reflectorFactory));
TypeHandlerRegistry registry = [MASK].getTypeHandlerRegistry();
MappedStatement mappedStatement = new MappedStatement.Builder([MASK], "testSelect",
new StaticSqlSource([MASK], "some select statement"), SqlCommandType.SELECT).build();
BoundSql boundSql = new BoundSql([MASK], "some select statement", new ArrayList<ParameterMapping>() {
private static final long serialVersionUID = 1L;
{
add(new ParameterMapping.Builder([MASK], "id", registry.getTypeHandler(int.class))
.jdbcType(JdbcType.INTEGER).build());
add(new ParameterMapping.Builder([MASK], "username", registry.getTypeHandler(String.class))
.jdbcType(JdbcType.VARCHAR).build());
add(new ParameterMapping.Builder([MASK], "password", registry.getTypeHandler(String.class))
.jdbcType(JdbcType.VARCHAR).build());
add(new ParameterMapping.Builder([MASK], "email", registry.getTypeHandler(String.class))
.jdbcType(JdbcType.VARCHAR).build());
add(new ParameterMapping.Builder([MASK], "bio", registry.getTypeHandler(String.class))
.jdbcType(JdbcType.VARCHAR).build());
add(new ParameterMapping.Builder([MASK], "favouriteSection", registry.getTypeHandler(Section.class))
.jdbcType(JdbcType.VARCHAR).build());
}
}, parameterObject);
DefaultParameterHandler defaultParameterHandler = new DefaultParameterHandler(mappedStatement, parameterObject,
boundSql);
PreparedStatement ps = mock(PreparedStatement.class);
defaultParameterHandler.setParameters(ps);
verify(parameterObject).getId();
verify(parameterObject).getUsername();
verify(parameterObject).getPassword();
verify(parameterObject).getEmail();
verify(parameterObject).getBio();
verify(parameterObject).getFavouriteSection();
verify([MASK]).newMetaObject(parameterObject);
}
} | mockConfig | java | mybatis-3 |
package io.socket.client;
import org.junit.After;
import org.junit.Before;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.*;
import java.util.logging.Logger;
public abstract class Connection {
private static final Logger logger = Logger.getLogger(Connection.class.getName());
final static int TIMEOUT = 7000;
final static int PORT = 3000;
private Process serverProcess;
private ExecutorService serverService;
private Future serverOutput;
private Future serverError;
@Before
public void startServer() throws IOException, InterruptedException {
logger.fine("Starting server ...");
final CountDownLatch latch = new CountDownLatch(1);
serverProcess = Runtime.getRuntime().exec(
String.format("node src/test/resources/server.js %s", nsp()), createEnv());
serverService = Executors.newCachedThreadPool();
serverOutput = serverService.submit(new Runnable() {
@Override
public void run() {
BufferedReader reader = new BufferedReader(
new InputStreamReader(serverProcess.getInputStream()));
String line;
try {
line = reader.readLine();
latch.countDown();
do {
logger.fine("SERVER OUT: " + line);
} while ((line = reader.readLine()) != null);
} catch (IOException e) {
logger.warning(e.getMessage());
}
}
});
serverError = serverService.submit(new Runnable() {
@Override
public void run() {
BufferedReader reader = new BufferedReader(
new InputStreamReader(serverProcess.getErrorStream()));
String line;
try {
while ((line = reader.readLine()) != null) {
logger.fine("SERVER ERR: " + line);
}
} catch (IOException e) {
logger.warning(e.getMessage());
}
}
});
latch.await(3000, TimeUnit.MILLISECONDS);
}
@After
public void stopServer() throws InterruptedException {
logger.fine("Stopping server ...");
serverProcess.destroy();
serverOutput.cancel(false);
serverError.cancel(false);
serverService.shutdown();
serverService.awaitTermination(3000, TimeUnit.MILLISECONDS);
}
Socket client() {
return client(createOptions());
}
Socket client(String path) {
return client(path, createOptions());
}
Socket client(IO.Options opts) {
return client(nsp(), opts);
}
Socket client(String path, IO.Options opts) {
return IO.socket(URI.create(uri() + path), opts);
}
URI uri() {
return URI.create("http:
}
String nsp() {
return "/";
}
IO.Options createOptions() {
IO.Options opts = new IO.Options();
opts.forceNew = true;
return opts;
}
String[] createEnv() {
Map<String, String> [MASK] = new HashMap<>(System.getenv());
[MASK].put("DEBUG", "socket.io:*");
[MASK].put("PORT", String.valueOf(PORT));
String[] _env = new String[[MASK].size()];
int i = 0;
for (String key : [MASK].keySet()) {
_env[i] = key + "=" + [MASK].get(key);
i++;
}
return _env;
}
} | env | java | socket.io-client-java |
package org.springframework.boot.autoconfigure.hazelcast;
import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.[MASK].ConditionOutcome;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.core.env.Environment;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.type.AnnotatedTypeMetadata;
import org.springframework.mock.env.MockEnvironment;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
class HazelcastClientConfigAvailableConditionTests {
private final HazelcastClientConfigAvailableCondition [MASK] = new HazelcastClientConfigAvailableCondition();
@Test
void explicitConfigurationWithClientConfigMatches() {
ConditionOutcome outcome = getMatchOutcome(new MockEnvironment().withProperty("spring.hazelcast.config",
"classpath:org/springframework/boot/autoconfigure/hazelcast/hazelcast-client-specific.xml"));
assertThat(outcome.isMatch()).isTrue();
assertThat(outcome.getMessage()).contains("Hazelcast client configuration detected");
}
@Test
void explicitConfigurationWithServerConfigDoesNotMatch() {
ConditionOutcome outcome = getMatchOutcome(new MockEnvironment().withProperty("spring.hazelcast.config",
"classpath:org/springframework/boot/autoconfigure/hazelcast/hazelcast-specific.xml"));
assertThat(outcome.isMatch()).isFalse();
assertThat(outcome.getMessage()).contains("Hazelcast server configuration detected");
}
@Test
void explicitConfigurationWithMissingConfigDoesNotMatch() {
ConditionOutcome outcome = getMatchOutcome(new MockEnvironment().withProperty("spring.hazelcast.config",
"classpath:org/springframework/boot/autoconfigure/hazelcast/test-config-does-not-exist.xml"));
assertThat(outcome.isMatch()).isFalse();
assertThat(outcome.getMessage()).contains("Hazelcast configuration does not exist");
}
private ConditionOutcome getMatchOutcome(Environment environment) {
ConditionContext conditionContext = mock(ConditionContext.class);
given(conditionContext.getEnvironment()).willReturn(environment);
given(conditionContext.getResourceLoader()).willReturn(new DefaultResourceLoader());
return this.[MASK].getMatchOutcome(conditionContext, mock(AnnotatedTypeMetadata.class));
}
} | condition | java | spring-boot |
package org.springframework.core.io.support;
import java.beans.PropertyEditor;
import org.junit.jupiter.api.Test;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileUrlResource;
import org.springframework.core.io.Resource;
import org.springframework.util.PlaceholderResolutionException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
class ResourceArrayPropertyEditorTests {
@Test
void vanillaResource() {
PropertyEditor editor = new ResourceArrayPropertyEditor();
editor.setAsText("classpath:org/springframework/core/io/support/ResourceArrayPropertyEditor.class");
Resource[] [MASK] = (Resource[]) editor.getValue();
assertThat([MASK]).isNotNull();
assertThat([MASK][0].exists()).isTrue();
}
@Test
void patternResource() {
PropertyEditor editor = new ResourceArrayPropertyEditor();
editor.setAsText("classpath*:org/springframework/core/io/support/Resource*Editor.class");
Resource[] [MASK] = (Resource[]) editor.getValue();
assertThat([MASK]).isNotNull();
assertThat([MASK][0].exists()).isTrue();
}
@Test
void systemPropertyReplacement() {
PropertyEditor editor = new ResourceArrayPropertyEditor();
System.setProperty("test.prop", "foo");
try {
editor.setAsText("${test.prop}");
Resource[] [MASK] = (Resource[]) editor.getValue();
assertThat([MASK][0].getFilename()).isEqualTo("foo");
}
finally {
System.clearProperty("test.prop");
}
}
@Test
void strictSystemPropertyReplacementWithUnresolvablePlaceholder() {
PropertyEditor editor = new ResourceArrayPropertyEditor(
new PathMatchingResourcePatternResolver(), new StandardEnvironment(),
false);
System.setProperty("test.prop", "foo");
try {
assertThatExceptionOfType(PlaceholderResolutionException.class).isThrownBy(() ->
editor.setAsText("${test.prop}-${bar}"));
}
finally {
System.clearProperty("test.prop");
}
}
@Test
void commaDelimitedResourcesWithSingleResource() {
PropertyEditor editor = new ResourceArrayPropertyEditor();
editor.setAsText("classpath:org/springframework/core/io/support/ResourceArrayPropertyEditor.class,file:/test.txt");
Resource[] [MASK] = (Resource[]) editor.getValue();
assertThat([MASK]).isNotNull();
assertThat([MASK][0]).isInstanceOfSatisfying(ClassPathResource.class,
resource -> assertThat(resource.exists()).isTrue());
assertThat([MASK][1]).isInstanceOfSatisfying(FileUrlResource.class,
resource -> assertThat(resource.getFilename()).isEqualTo("test.txt"));
}
@Test
void commaDelimitedResourcesWithMultipleResources() {
PropertyEditor editor = new ResourceArrayPropertyEditor();
editor.setAsText("file:/test.txt, classpath:org/springframework/core/io/support/test-[MASK]/*.txt");
Resource[] [MASK] = (Resource[]) editor.getValue();
assertThat([MASK]).isNotNull();
assertThat([MASK][0]).isInstanceOfSatisfying(FileUrlResource.class,
resource -> assertThat(resource.getFilename()).isEqualTo("test.txt"));
assertThat([MASK]).anySatisfy(candidate ->
assertThat(candidate.getFilename()).isEqualTo("resource1.txt"));
assertThat([MASK]).anySatisfy(candidate ->
assertThat(candidate.getFilename()).isEqualTo("resource2.txt"));
assertThat([MASK]).hasSize(3);
}
} | resources | java | spring-framework |
package org.elasticsearch.upgrades;
import io.netty.handler.codec.http.HttpMethod;
import com.carrotsearch.randomizedtesting.annotations.Name;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.MetadataIndexStateService;
import org.elasticsearch.common.settings.Settings;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
public class AddIndexBlockRollingUpgradeIT extends AbstractRollingUpgradeTestCase {
private static final String INDEX_NAME = "test_add_block";
public AddIndexBlockRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) {
super(upgradedNodes);
}
public void testAddBlock() throws Exception {
if (isOldCluster()) {
createIndex(INDEX_NAME);
} else if (isMixedCluster()) {
blockWrites();
if (minimumTransportVersion().before(TransportVersions.ADD_INDEX_BLOCK_TWO_PHASE)) {
assertNull(verifiedSettingValue());
} else {
assertThat(verifiedSettingValue(), Matchers.equalTo("true"));
expectThrows(
ResponseException.class,
() -> updateIndexSettings(
INDEX_NAME,
Settings.builder().putNull(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey())
)
);
}
} else {
assertTrue(isUpgradedCluster());
blockWrites();
assertThat(verifiedSettingValue(), Matchers.equalTo("true"));
expectThrows(
ResponseException.class,
() -> updateIndexSettings(
INDEX_NAME,
Settings.builder().putNull(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey())
)
);
}
}
private static void blockWrites() throws IOException {
var [MASK] = randomFrom(IndexMetadata.APIBlock.READ_ONLY, IndexMetadata.APIBlock.WRITE).name().toLowerCase(Locale.ROOT);
client().performRequest(new Request(HttpMethod.PUT.name(), "/" + INDEX_NAME + "/_block/" + [MASK]));
expectThrows(
ResponseException.class,
() -> client().performRequest(
newXContentRequest(HttpMethod.PUT, "/" + INDEX_NAME + "/_doc/test", (builder, params) -> builder.field("test", "test"))
)
);
}
@SuppressWarnings("unchecked")
private static String verifiedSettingValue() throws IOException {
final var settingsRequest = new Request(HttpMethod.GET.name(), "/" + INDEX_NAME + "/_settings?flat_settings");
final Map<String, Object> settingsResponse = entityAsMap(client().performRequest(settingsRequest));
return (String) ((Map<String, Object>) ((Map<String, Object>) settingsResponse.get(INDEX_NAME)).get("settings")).get(
MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey()
);
}
} | block | java | elasticsearch |
package android.icu.dev.test.collator;
import java.util.Locale;
import org.junit.Test;
import android.icu.dev.test.TestFmwk;
import android.icu.text.CollationKey;
import android.icu.text.Collator;
import android.icu.text.RuleBasedCollator;
public class CollationCurrencyTest extends TestFmwk {
@Test
public void TestCurrency() {
char[][] currency = {
{ 0x00A4 },
{ 0x00A2 },
{ 0xFFE0 },
{ 0x0024 },
{ 0xFF04 },
{ 0xFE69 },
{ 0x00A3 },
{ 0xFFE1 },
{ 0x00A5 },
{ 0xFFE5 },
{ 0x09F2 },
{ 0x09F3 },
{ 0x0E3F },
{ 0x17DB },
{ 0x20A0 },
{ 0x20A1 },
{ 0x20A2 },
{ 0x20A3 },
{ 0x20A4 },
{ 0x20A5 },
{ 0x20A6 },
{ 0x20A7 },
{ 0x20A9 },
{ 0xFFE6 },
{ 0x20AA },
{ 0x20AB },
{ 0x20AC },
{ 0x20AD },
{ 0x20AE },
{ 0x20AF }
};
int i, j;
int expectedResult = 0;
RuleBasedCollator c = (RuleBasedCollator)Collator.getInstance(Locale.ENGLISH);
String source;
String target;
for (i = 0; i < currency.length; i += 1) {
for (j = 0; j < currency.length; j += 1) {
source = new String(currency[i]);
target = new String(currency[j]);
if (i < j) {
expectedResult = -1;
} else if ( i == j) {
expectedResult = 0;
} else {
expectedResult = 1;
}
int compareResult = c.compare(source, target);
CollationKey sourceKey = null;
sourceKey = c.getCollationKey(source);
if (sourceKey == null) {
errln("Couldn't get collationKey for source");
continue;
}
CollationKey targetKey = null;
targetKey = c.getCollationKey(target);
if (targetKey == null) {
errln("Couldn't get collationKey for source");
continue;
}
int keyResult = sourceKey.compareTo(targetKey);
reportCResult( source, target, sourceKey, targetKey, compareResult, keyResult, compareResult, expectedResult );
}
}
}
private void reportCResult( String source, String target, CollationKey sourceKey, CollationKey targetKey,
int compareResult, int keyResult, int incResult, int expectedResult ) {
if (expectedResult < -1 || expectedResult > 1) {
errln("***** invalid call to reportCResult ****");
return;
}
boolean ok1 = (compareResult == expectedResult);
boolean ok2 = (keyResult == expectedResult);
boolean [MASK] = (incResult == expectedResult);
if (ok1 && ok2 && [MASK] && !isVerbose()) {
return;
} else {
String msg1 = ok1? "Ok: compare(\"" : "FAIL: compare(\"";
String msg2 = "\", \"";
String msg3 = "\") returned ";
String msg4 = "; expected ";
String sExpect = new String("");
String sResult = new String("");
sResult = CollationTest.appendCompareResult(compareResult, sResult);
sExpect = CollationTest.appendCompareResult(expectedResult, sExpect);
if (ok1) {
logln(msg1 + source + msg2 + target + msg3 + sResult);
} else {
errln(msg1 + source + msg2 + target + msg3 + sResult + msg4 + sExpect);
}
msg1 = ok2 ? "Ok: key(\"" : "FAIL: key(\"";
msg2 = "\").compareTo(key(\"";
msg3 = "\")) returned ";
sResult = CollationTest.appendCompareResult(keyResult, sResult);
if (ok2) {
logln(msg1 + source + msg2 + target + msg3 + sResult);
} else {
errln(msg1 + source + msg2 + target + msg3 + sResult + msg4 + sExpect);
msg1 = " ";
msg2 = " vs. ";
errln(msg1 + CollationTest.prettify(sourceKey) + msg2 + CollationTest.prettify(targetKey));
}
msg1 = [MASK] ? "Ok: incCompare(\"" : "FAIL: incCompare(\"";
msg2 = "\", \"";
msg3 = "\") returned ";
sResult = CollationTest.appendCompareResult(incResult, sResult);
if ([MASK]) {
logln(msg1 + source + msg2 + target + msg3 + sResult);
} else {
errln(msg1 + source + msg2 + target + msg3 + sResult + msg4 + sExpect);
}
}
}
} | ok3 | java | j2objc |
package org.springframework.web.reactive.result.view.freemarker;
import org.springframework.web.reactive.result.view.AbstractUrlBasedView;
import org.springframework.web.reactive.result.view.UrlBasedViewResolver;
public class FreeMarkerViewResolver extends UrlBasedViewResolver {
public FreeMarkerViewResolver() {
setViewClass(requiredViewClass());
}
public FreeMarkerViewResolver(String [MASK], String suffix) {
setViewClass(requiredViewClass());
setPrefix([MASK]);
setSuffix(suffix);
}
@Override
protected Class<?> requiredViewClass() {
return FreeMarkerView.class;
}
@Override
protected AbstractUrlBasedView instantiateView() {
return (getViewClass() == FreeMarkerView.class ? new FreeMarkerView() : super.instantiateView());
}
} | prefix | java | spring-framework |
package org.elasticsearch.gradle.internal.precommit;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.networknt.schema.JsonSchema;
import com.networknt.schema.JsonSchemaException;
import com.networknt.schema.JsonSchemaFactory;
import com.networknt.schema.SchemaValidatorsConfig;
import com.networknt.schema.SpecVersion;
import com.networknt.schema.ValidationMessage;
import org.gradle.api.DefaultTask;
import org.gradle.api.UncheckedIOException;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.TaskAction;
import org.gradle.work.ChangeType;
import org.gradle.work.FileChange;
import org.gradle.work.Incremental;
import org.gradle.work.InputChanges;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.StreamSupport;
public class ValidateJsonAgainstSchemaTask extends DefaultTask {
private File jsonSchema;
private File [MASK];
private FileCollection inputFiles;
@Incremental
@InputFiles
public FileCollection getInputFiles() {
return inputFiles;
}
public void setInputFiles(FileCollection inputFiles) {
this.inputFiles = inputFiles;
}
@InputFile
public File getJsonSchema() {
return jsonSchema;
}
public void setJsonSchema(File jsonSchema) {
this.jsonSchema = jsonSchema;
}
public void setReport(File [MASK]) {
this.[MASK] = [MASK];
}
@OutputFile
public File getReport() {
return this.[MASK];
}
@Internal
protected ObjectMapper getMapper() {
return new ObjectMapper();
}
@Internal
protected String getFileType() {
return "JSON";
}
@TaskAction
public void validate(InputChanges inputChanges) throws IOException {
final File jsonSchemaOnDisk = getJsonSchema();
final JsonSchema jsonSchema = buildSchemaObject(jsonSchemaOnDisk);
final Map<File, Set<String>> errors = new LinkedHashMap<>();
final ObjectMapper mapper = this.getMapper();
StreamSupport.stream(inputChanges.getFileChanges(getInputFiles()).spliterator(), false)
.filter(f -> f.getChangeType() != ChangeType.REMOVED)
.map(FileChange::getFile)
.filter(file -> file.isDirectory() == false)
.forEach(file -> {
try {
Set<ValidationMessage> validationMessages = jsonSchema.validate(mapper.readTree(file));
maybeLogAndCollectError(validationMessages, errors, file);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
if (errors.isEmpty()) {
Files.writeString(getReport().toPath(), "Success! No validation errors found.", StandardOpenOption.CREATE);
} else {
try (PrintWriter printWriter = new PrintWriter(getReport())) {
printWriter.printf("Schema: %s%n", jsonSchemaOnDisk);
printWriter.println("----------Validation Errors-----------");
errors.values().stream().flatMap(Collection::stream).forEach(printWriter::println);
}
StringBuilder sb = new StringBuilder();
sb.append("Verification failed. See the [MASK] at: ");
sb.append(getReport().toURI().toASCIIString());
sb.append(System.lineSeparator());
sb.append(
String.format(
"Error validating %s: %d files contained %d violations",
getFileType(),
errors.keySet().size(),
errors.values().size()
)
);
throw new JsonSchemaException(sb.toString());
}
}
private JsonSchema buildSchemaObject(File jsonSchemaOnDisk) throws IOException {
final ObjectMapper jsonMapper = new ObjectMapper();
final SchemaValidatorsConfig config = new SchemaValidatorsConfig();
final JsonSchemaFactory factory = JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7);
return factory.getSchema(jsonMapper.readTree(jsonSchemaOnDisk), config);
}
private void maybeLogAndCollectError(Set<ValidationMessage> messages, Map<File, Set<String>> errors, File file) {
final String fileType = getFileType();
for (ValidationMessage message : messages) {
getLogger().error("[validate {}][ERROR][{}][{}]", fileType, file.getName(), message.toString());
errors.computeIfAbsent(file, k -> new LinkedHashSet<>())
.add(String.format("%s: %s", file.getAbsolutePath(), message.toString()));
}
}
} | report | java | elasticsearch |
package org.springframework.boot.build.bom.bomr.version;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.ComparableVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
final class MultipleComponentsDependencyVersion extends ArtifactVersionDependencyVersion {
private final String [MASK];
private MultipleComponentsDependencyVersion(ArtifactVersion artifactVersion, String [MASK]) {
super(artifactVersion, new ComparableVersion([MASK]));
this.[MASK] = [MASK];
}
@Override
public String toString() {
return this.[MASK];
}
static MultipleComponentsDependencyVersion parse(String input) {
String[] components = input.split("\\.");
if (components.length == 4 || components.length == 5) {
ArtifactVersion artifactVersion = new DefaultArtifactVersion(
components[0] + "." + components[1] + "." + components[2]);
if (artifactVersion.getQualifier() != null && artifactVersion.getQualifier().equals(input)) {
return null;
}
return new MultipleComponentsDependencyVersion(artifactVersion, input);
}
return null;
}
} | original | java | spring-boot |
package org.springframework.http.codec.support;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
import org.springframework.core.ResolvableType;
import org.springframework.core.codec.ByteArrayDecoder;
import org.springframework.core.codec.ByteArrayEncoder;
import org.springframework.core.codec.ByteBufferDecoder;
import org.springframework.core.codec.ByteBufferEncoder;
import org.springframework.core.codec.CharSequenceEncoder;
import org.springframework.core.codec.DataBufferDecoder;
import org.springframework.core.codec.DataBufferEncoder;
import org.springframework.core.codec.Decoder;
import org.springframework.core.codec.Encoder;
import org.springframework.core.codec.NettyByteBufDecoder;
import org.springframework.core.codec.NettyByteBufEncoder;
import org.springframework.core.codec.ResourceDecoder;
import org.springframework.core.codec.StringDecoder;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.http.MediaType;
import org.springframework.http.codec.ClientCodecConfigurer;
import org.springframework.http.codec.DecoderHttpMessageReader;
import org.springframework.http.codec.EncoderHttpMessageWriter;
import org.springframework.http.codec.FormHttpMessageReader;
import org.springframework.http.codec.FormHttpMessageWriter;
import org.springframework.http.codec.HttpMessageReader;
import org.springframework.http.codec.HttpMessageWriter;
import org.springframework.http.codec.ResourceHttpMessageReader;
import org.springframework.http.codec.ResourceHttpMessageWriter;
import org.springframework.http.codec.ServerSentEventHttpMessageReader;
import org.springframework.http.codec.cbor.KotlinSerializationCborDecoder;
import org.springframework.http.codec.cbor.KotlinSerializationCborEncoder;
import org.springframework.http.codec.json.Jackson2CodecSupport;
import org.springframework.http.codec.json.Jackson2JsonDecoder;
import org.springframework.http.codec.json.Jackson2JsonEncoder;
import org.springframework.http.codec.json.Jackson2SmileDecoder;
import org.springframework.http.codec.json.Jackson2SmileEncoder;
import org.springframework.http.codec.multipart.DefaultPartHttpMessageReader;
import org.springframework.http.codec.multipart.MultipartHttpMessageReader;
import org.springframework.http.codec.multipart.MultipartHttpMessageWriter;
import org.springframework.http.codec.multipart.PartEventHttpMessageReader;
import org.springframework.http.codec.multipart.PartEventHttpMessageWriter;
import org.springframework.http.codec.multipart.PartHttpMessageWriter;
import org.springframework.http.codec.protobuf.KotlinSerializationProtobufDecoder;
import org.springframework.http.codec.protobuf.KotlinSerializationProtobufEncoder;
import org.springframework.http.codec.protobuf.ProtobufDecoder;
import org.springframework.http.codec.protobuf.ProtobufHttpMessageWriter;
import org.springframework.http.codec.xml.Jaxb2XmlDecoder;
import org.springframework.http.codec.xml.Jaxb2XmlEncoder;
import org.springframework.util.MimeTypeUtils;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.core.ResolvableType.forClass;
class ClientCodecConfigurerTests {
private final ClientCodecConfigurer configurer = new DefaultClientCodecConfigurer();
private final AtomicInteger index = new AtomicInteger();
@Test
void defaultReaders() {
List<HttpMessageReader<?>> readers = this.configurer.getReaders();
assertThat(readers).hasSize(18);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(ByteArrayDecoder.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(ByteBufferDecoder.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(DataBufferDecoder.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(NettyByteBufDecoder.class);
assertThat(readers.get(this.index.getAndIncrement()).getClass()).isEqualTo(ResourceHttpMessageReader.class);
assertStringDecoder(getNextDecoder(readers), true);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(ProtobufDecoder.class);
assertThat(readers.get(this.index.getAndIncrement()).getClass()).isEqualTo(FormHttpMessageReader.class);
assertThat(readers.get(this.index.getAndIncrement()).getClass()).isEqualTo(DefaultPartHttpMessageReader.class);
assertThat(readers.get(this.index.getAndIncrement()).getClass()).isEqualTo(MultipartHttpMessageReader.class);
assertThat(readers.get(this.index.getAndIncrement()).getClass()).isEqualTo(PartEventHttpMessageReader.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(KotlinSerializationCborDecoder.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(KotlinSerializationProtobufDecoder.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(Jackson2JsonDecoder.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(Jackson2SmileDecoder.class);
assertThat(getNextDecoder(readers).getClass()).isEqualTo(Jaxb2XmlDecoder.class);
assertSseReader(readers);
assertStringDecoder(getNextDecoder(readers), false);
}
@Test
void defaultWriters() {
List<HttpMessageWriter<?>> writers = this.configurer.getWriters();
assertThat(writers).hasSize(16);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(ByteArrayEncoder.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(ByteBufferEncoder.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(DataBufferEncoder.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(NettyByteBufEncoder.class);
assertThat(writers.get(index.getAndIncrement()).getClass()).isEqualTo(ResourceHttpMessageWriter.class);
assertStringEncoder(getNextEncoder(writers), true);
assertThat(writers.get(index.getAndIncrement()).getClass()).isEqualTo(ProtobufHttpMessageWriter.class);
assertThat(writers.get(this.index.getAndIncrement()).getClass()).isEqualTo(MultipartHttpMessageWriter.class);
assertThat(writers.get(this.index.getAndIncrement()).getClass()).isEqualTo(PartEventHttpMessageWriter.class);
assertThat(writers.get(this.index.getAndIncrement()).getClass()).isEqualTo(PartHttpMessageWriter.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(KotlinSerializationCborEncoder.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(KotlinSerializationProtobufEncoder.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(Jackson2JsonEncoder.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(Jackson2SmileEncoder.class);
assertThat(getNextEncoder(writers).getClass()).isEqualTo(Jaxb2XmlEncoder.class);
assertStringEncoder(getNextEncoder(writers), false);
}
@Test
void jackson2CodecCustomization() {
Jackson2JsonDecoder decoder = new Jackson2JsonDecoder();
Jackson2JsonEncoder encoder = new Jackson2JsonEncoder();
this.configurer.defaultCodecs().jackson2JsonDecoder(decoder);
this.configurer.defaultCodecs().jackson2JsonEncoder(encoder);
List<HttpMessageReader<?>> readers = this.configurer.getReaders();
Jackson2JsonDecoder actualDecoder = findCodec(readers, Jackson2JsonDecoder.class);
assertThat(actualDecoder).isSameAs(decoder);
assertThat(findCodec(readers, ServerSentEventHttpMessageReader.class).getDecoder()).isSameAs(decoder);
List<HttpMessageWriter<?>> writers = this.configurer.getWriters();
Jackson2JsonEncoder actualEncoder = findCodec(writers, Jackson2JsonEncoder.class);
assertThat(actualEncoder).isSameAs(encoder);
MultipartHttpMessageWriter multipartWriter = findCodec(writers, MultipartHttpMessageWriter.class);
actualEncoder = findCodec(multipartWriter.getPartWriters(), Jackson2JsonEncoder.class);
assertThat(actualEncoder).isSameAs(encoder);
}
@Test
void objectMapperCustomization() {
ObjectMapper objectMapper = new ObjectMapper();
this.configurer.defaultCodecs().configureDefaultCodec(codec -> {
if (codec instanceof Jackson2CodecSupport) {
((Jackson2CodecSupport) codec).setObjectMapper(objectMapper);
}
});
List<HttpMessageReader<?>> readers = this.configurer.getReaders();
Jackson2JsonDecoder actualDecoder = findCodec(readers, Jackson2JsonDecoder.class);
assertThat(actualDecoder.getObjectMapper()).isSameAs(objectMapper);
List<HttpMessageWriter<?>> writers = this.configurer.getWriters();
Jackson2JsonEncoder actualEncoder = findCodec(writers, Jackson2JsonEncoder.class);
assertThat(actualEncoder.getObjectMapper()).isSameAs(objectMapper);
MultipartHttpMessageWriter multipartWriter = findCodec(writers, MultipartHttpMessageWriter.class);
actualEncoder = findCodec(multipartWriter.getPartWriters(), Jackson2JsonEncoder.class);
assertThat(actualEncoder.getObjectMapper()).isSameAs(objectMapper);
}
@Test
void maxInMemorySize() {
int size = 99;
this.configurer.defaultCodecs().maxInMemorySize(size);
List<HttpMessageReader<?>> readers = this.configurer.getReaders();
assertThat(readers).hasSize(18);
assertThat(((ByteArrayDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((ByteBufferDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((DataBufferDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((NettyByteBufDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((ResourceDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((StringDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((ProtobufDecoder) getNextDecoder(readers)).getMaxMessageSize()).isEqualTo(size);
assertThat(((FormHttpMessageReader) nextReader(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((DefaultPartHttpMessageReader) nextReader(readers)).getMaxInMemorySize()).isEqualTo(size);
nextReader(readers);
assertThat(((PartEventHttpMessageReader) nextReader(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((KotlinSerializationCborDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((KotlinSerializationProtobufDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((Jackson2JsonDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((Jackson2SmileDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
assertThat(((Jaxb2XmlDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
ServerSentEventHttpMessageReader reader = (ServerSentEventHttpMessageReader) nextReader(readers);
assertThat(reader.getMaxInMemorySize()).isEqualTo(size);
assertThat(((Jackson2JsonDecoder) reader.getDecoder()).getMaxInMemorySize()).isEqualTo(size);
assertThat(((StringDecoder) getNextDecoder(readers)).getMaxInMemorySize()).isEqualTo(size);
}
@Test
void enableLoggingRequestDetails() {
this.configurer.defaultCodecs().enableLoggingRequestDetails(true);
List<HttpMessageWriter<?>> writers = this.configurer.getWriters();
MultipartHttpMessageWriter multipartWriter = findCodec(writers, MultipartHttpMessageWriter.class);
assertThat(multipartWriter.isEnableLoggingRequestDetails()).isTrue();
FormHttpMessageWriter formWriter = (FormHttpMessageWriter) multipartWriter.getFormWriter();
assertThat(formWriter).isNotNull();
assertThat(formWriter.isEnableLoggingRequestDetails()).isTrue();
}
@Test
void clonedConfigurer() {
ClientCodecConfigurer clone = this.configurer.clone();
Jackson2JsonDecoder jackson2Decoder = new Jackson2JsonDecoder();
clone.defaultCodecs().serverSentEventDecoder(jackson2Decoder);
clone.defaultCodecs().multipartCodecs().encoder(new Jackson2SmileEncoder());
clone.defaultCodecs().multipartCodecs().writer(new ResourceHttpMessageWriter());
Decoder<?> sseDecoder = findCodec(clone.getReaders(), ServerSentEventHttpMessageReader.class).getDecoder();
List<HttpMessageWriter<?>> writers = findCodec(clone.getWriters(), MultipartHttpMessageWriter.class).getPartWriters();
assertThat(sseDecoder).isSameAs(jackson2Decoder);
assertThat(writers).hasSize(2);
sseDecoder = findCodec(this.configurer.getReaders(), ServerSentEventHttpMessageReader.class).getDecoder();
writers = findCodec(this.configurer.getWriters(), MultipartHttpMessageWriter.class).getPartWriters();
assertThat(sseDecoder).isNotSameAs(jackson2Decoder);
assertThat(writers).hasSize(16);
}
@Test
public void cloneShouldNotDropMultipartCodecs() {
ClientCodecConfigurer clone = this.configurer.clone();
List<HttpMessageWriter<?>> writers =
findCodec(clone.getWriters(), MultipartHttpMessageWriter.class).getPartWriters();
assertThat(writers).hasSize(16);
}
@Test
void cloneShouldNotBeImpactedByChangesToOriginal() {
ClientCodecConfigurer clone = this.configurer.clone();
this.configurer.registerDefaults(false);
this.configurer.customCodecs().register(new Jackson2JsonEncoder());
List<HttpMessageWriter<?>> writers =
findCodec(clone.getWriters(), MultipartHttpMessageWriter.class).getPartWriters();
assertThat(writers).hasSize(16);
}
private Decoder<?> getNextDecoder(List<HttpMessageReader<?>> readers) {
HttpMessageReader<?> reader = readers.get(this.index.getAndIncrement());
assertThat(reader).isInstanceOf(DecoderHttpMessageReader.class);
return ((DecoderHttpMessageReader<?>) reader).getDecoder();
}
private HttpMessageReader<?> nextReader(List<HttpMessageReader<?>> readers) {
return readers.get(this.index.getAndIncrement());
}
private Encoder<?> getNextEncoder(List<HttpMessageWriter<?>> writers) {
HttpMessageWriter<?> writer = writers.get(this.index.getAndIncrement());
assertThat(writer.getClass()).isEqualTo(EncoderHttpMessageWriter.class);
return ((EncoderHttpMessageWriter<?>) writer).getEncoder();
}
@SuppressWarnings("unchecked")
private <T> T findCodec(List<?> codecs, Class<T> [MASK]) {
return (T) codecs.stream()
.map(c -> {
if (c instanceof EncoderHttpMessageWriter) {
return ((EncoderHttpMessageWriter<?>) c).getEncoder();
}
else if (c instanceof DecoderHttpMessageReader) {
return ((DecoderHttpMessageReader<?>) c).getDecoder();
}
else {
return c;
}
})
.filter([MASK]::isInstance).findFirst().get();
}
@SuppressWarnings("unchecked")
private void assertStringDecoder(Decoder<?> decoder, boolean textOnly) {
assertThat(decoder.getClass()).isEqualTo(StringDecoder.class);
assertThat(decoder.canDecode(forClass(String.class), MimeTypeUtils.TEXT_PLAIN)).isTrue();
Object expected = !textOnly;
assertThat(decoder.canDecode(forClass(String.class), MediaType.TEXT_EVENT_STREAM)).isEqualTo(expected);
byte[] bytes = "line1\nline2".getBytes(StandardCharsets.UTF_8);
Flux<String> decoded = (Flux<String>) decoder.decode(
Flux.just(DefaultDataBufferFactory.sharedInstance.wrap(bytes)),
ResolvableType.forClass(String.class), MimeTypeUtils.TEXT_PLAIN, Collections.emptyMap());
assertThat(decoded.collectList().block(Duration.ZERO)).isEqualTo(Arrays.asList("line1", "line2"));
}
private void assertStringEncoder(Encoder<?> encoder, boolean textOnly) {
assertThat(encoder.getClass()).isEqualTo(CharSequenceEncoder.class);
assertThat(encoder.canEncode(forClass(String.class), MimeTypeUtils.TEXT_PLAIN)).isTrue();
Object expected = !textOnly;
assertThat(encoder.canEncode(forClass(String.class), MediaType.TEXT_EVENT_STREAM)).isEqualTo(expected);
}
private void assertSseReader(List<HttpMessageReader<?>> readers) {
HttpMessageReader<?> reader = readers.get(this.index.getAndIncrement());
assertThat(reader.getClass()).isEqualTo(ServerSentEventHttpMessageReader.class);
Decoder<?> decoder = ((ServerSentEventHttpMessageReader) reader).getDecoder();
assertThat(decoder).isNotNull();
assertThat(decoder.getClass()).isEqualTo(Jackson2JsonDecoder.class);
}
} | type | java | spring-framework |
package org.springframework.boot.ansi;
public enum AnsiBackground implements AnsiElement {
DEFAULT("49"),
BLACK("40"),
RED("41"),
GREEN("42"),
YELLOW("43"),
BLUE("44"),
MAGENTA("45"),
CYAN("46"),
WHITE("47"),
BRIGHT_BLACK("100"),
BRIGHT_RED("101"),
BRIGHT_GREEN("102"),
BRIGHT_YELLOW("103"),
BRIGHT_BLUE("104"),
BRIGHT_MAGENTA("105"),
BRIGHT_CYAN("106"),
BRIGHT_WHITE("107");
private final String [MASK];
AnsiBackground(String [MASK]) {
this.[MASK] = [MASK];
}
@Override
public String toString() {
return this.[MASK];
}
} | code | java | spring-boot |
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.xpack.core.security.SecurityContext;
final class Transports {
private Transports() {}
static String username(SecurityContext [MASK]) {
return [MASK] != null && [MASK].getUser() != null ? [MASK].getUser().principal() : null;
}
static String clusterName(ClusterService clusterService) {
return clusterService.getClusterName().value();
}
} | securityContext | java | elasticsearch |
package org.springframework.aot.hint.support;
import org.jspecify.annotations.Nullable;
import org.springframework.aot.hint.RuntimeHints;
import org.springframework.aot.hint.RuntimeHintsRegistrar;
import org.springframework.aot.hint.TypeReference;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
class PathMatchingResourcePatternResolverRuntimeHints implements RuntimeHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader [MASK]) {
hints.reflection().registerType(TypeReference.of("org.eclipse.core.runtime.FileLocator"));
}
} | classLoader | java | spring-framework |
package org.elasticsearch.xpack.core.ml.notifications;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.core.common.notifications.Level;
import java.util.Date;
public class DataFrameAnalyticsAuditMessageTests extends AuditMessageTests<DataFrameAnalyticsAuditMessage> {
@Override
public String getJobType() {
return "data_frame_analytics";
}
@Override
protected DataFrameAnalyticsAuditMessage doParseInstance(XContentParser [MASK]) {
return DataFrameAnalyticsAuditMessage.PARSER.apply([MASK], null);
}
@Override
protected DataFrameAnalyticsAuditMessage createTestInstance() {
return new DataFrameAnalyticsAuditMessage(
randomBoolean() ? null : randomAlphaOfLength(10),
randomAlphaOfLengthBetween(1, 20),
randomFrom(Level.values()),
new Date(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20)
);
}
} | parser | java | elasticsearch |
package com.google.common.testing;
import static java.lang.Math.max;
import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.errorprone.annotations.DoNotMock;
import com.google.j2objc.annotations.J2ObjCIncompatible;
import java.lang.ref.WeakReference;
import java.util.Locale;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import org.jspecify.annotations.NullMarked;
@GwtIncompatible
@J2ktIncompatible
@J2ObjCIncompatible
@NullMarked
public final class GcFinalization {
private GcFinalization() {}
private static long timeoutSeconds() {
return max(10L, Runtime.getRuntime().totalMemory() / (32L * 1024L * 1024L));
}
@SuppressWarnings("removal")
public static void awaitDone(Future<?> [MASK]) {
if ([MASK].isDone()) {
return;
}
long timeoutSeconds = timeoutSeconds();
long deadline = System.nanoTime() + SECONDS.toNanos(timeoutSeconds);
do {
System.runFinalization();
if ([MASK].isDone()) {
return;
}
System.gc();
try {
[MASK].get(1L, SECONDS);
return;
} catch (CancellationException | ExecutionException ok) {
return;
} catch (InterruptedException ie) {
throw new RuntimeException("Unexpected interrupt while waiting for [MASK]", ie);
} catch (TimeoutException tryHarder) {
}
} while (System.nanoTime() - deadline < 0);
throw formatRuntimeException("Future not done within %d second timeout", timeoutSeconds);
}
@SuppressWarnings("removal")
public static void awaitDone(FinalizationPredicate predicate) {
if (predicate.isDone()) {
return;
}
long timeoutSeconds = timeoutSeconds();
long deadline = System.nanoTime() + SECONDS.toNanos(timeoutSeconds);
do {
System.runFinalization();
if (predicate.isDone()) {
return;
}
CountDownLatch done = new CountDownLatch(1);
createUnreachableLatchFinalizer(done);
await(done);
if (predicate.isDone()) {
return;
}
} while (System.nanoTime() - deadline < 0);
throw formatRuntimeException(
"Predicate did not become true within %d second timeout", timeoutSeconds);
}
@SuppressWarnings("removal")
public static void await(CountDownLatch latch) {
if (latch.getCount() == 0) {
return;
}
long timeoutSeconds = timeoutSeconds();
long deadline = System.nanoTime() + SECONDS.toNanos(timeoutSeconds);
do {
System.runFinalization();
if (latch.getCount() == 0) {
return;
}
System.gc();
try {
if (latch.await(1L, SECONDS)) {
return;
}
} catch (InterruptedException ie) {
throw new RuntimeException("Unexpected interrupt while waiting for latch", ie);
}
} while (System.nanoTime() - deadline < 0);
throw formatRuntimeException(
"Latch failed to count down within %d second timeout", timeoutSeconds);
}
private static void createUnreachableLatchFinalizer(CountDownLatch latch) {
Object unused =
new Object() {
@SuppressWarnings({"removal", "Finalize"})
@Override
protected void finalize() {
latch.countDown();
}
};
}
@DoNotMock("Implement with a lambda")
public interface FinalizationPredicate {
boolean isDone();
}
public static void awaitClear(WeakReference<?> ref) {
awaitDone(() -> ref.get() == null);
}
@SuppressWarnings({"removal", "Finalize"})
public static void awaitFullGc() {
CountDownLatch finalizerRan = new CountDownLatch(1);
WeakReference<Object> ref =
new WeakReference<>(
new Object() {
@Override
protected void finalize() {
finalizerRan.countDown();
}
});
await(finalizerRan);
awaitClear(ref);
System.runFinalization();
}
private static RuntimeException formatRuntimeException(String format, Object... args) {
return new RuntimeException(String.format(Locale.ROOT, format, args));
}
} | future | java | guava |
package com.google.common.primitives;
import static com.google.common.primitives.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.primitives.TestPlatform.reduceIterationsIfGwt;
import static com.google.common.testing.SerializableTester.reserialize;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Arrays.stream;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ObjectArrays;
import com.google.common.collect.testing.ListTestSuiteBuilder;
import com.google.common.collect.testing.SampleElements;
import com.google.common.collect.testing.TestListGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.testing.EqualsTester;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.LongStream;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jspecify.annotations.NullUnmarked;
@GwtCompatible(emulated = true)
@NullUnmarked
public class ImmutableLongArrayTest extends TestCase {
public void testOf0() {
assertThat(ImmutableLongArray.of().asList()).isEmpty();
}
public void testOf1() {
assertThat(ImmutableLongArray.of(0).asList()).containsExactly(0L);
}
public void testOf2() {
assertThat(ImmutableLongArray.of(0, 1).asList()).containsExactly(0L, 1L).inOrder();
}
public void testOf3() {
assertThat(ImmutableLongArray.of(0, 1, 3).asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testOf4() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6).asList())
.containsExactly(0L, 1L, 3L, 6L)
.inOrder();
}
public void testOf5() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10).asList())
.containsExactly(0L, 1L, 3L, 6L, 10L)
.inOrder();
}
public void testOf6() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10, 15).asList())
.containsExactly(0L, 1L, 3L, 6L, 10L, 15L)
.inOrder();
}
public void testOf7() {
assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10, 15, 21).asList())
.containsExactly(0L, 1L, 3L, 6L, 10L, 15L, 21L)
.inOrder();
}
public void testCopyOf_array_empty() {
assertThat(ImmutableLongArray.copyOf(new long[0])).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_array_nonempty() {
long[] array = new long[] {0, 1, 3};
ImmutableLongArray iia = ImmutableLongArray.copyOf(array);
array[2] = 2;
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_iterable_notCollection_empty() {
Iterable<Long> iterable = iterable(Collections.<Long>emptySet());
assertThat(ImmutableLongArray.copyOf(iterable)).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_iterable_notCollection_nonempty() {
List<Long> list = Arrays.asList(0L, 1L, 3L);
ImmutableLongArray iia = ImmutableLongArray.copyOf(iterable(list));
list.set(2, 2L);
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_iterable_collection_empty() {
Iterable<Long> iterable = Collections.emptySet();
assertThat(ImmutableLongArray.copyOf(iterable)).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_iterable_collection_nonempty() {
List<Long> list = Arrays.asList(0L, 1L, 3L);
ImmutableLongArray iia = ImmutableLongArray.copyOf((Iterable<Long>) list);
list.set(2, 2L);
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_collection_empty() {
Collection<Long> iterable = Collections.emptySet();
assertThat(ImmutableLongArray.copyOf(iterable)).isSameInstanceAs(ImmutableLongArray.of());
}
public void testCopyOf_collection_nonempty() {
List<Long> list = Arrays.asList(0L, 1L, 3L);
ImmutableLongArray iia = ImmutableLongArray.copyOf(list);
list.set(2, 2L);
assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
}
public void testCopyOf_stream() {
assertThat(ImmutableLongArray.copyOf(LongStream.empty()))
.isSameInstanceAs(ImmutableLongArray.of());
assertThat(ImmutableLongArray.copyOf(LongStream.of(0, 1, 3)).asList())
.containsExactly(0L, 1L, 3L)
.inOrder();
}
public void testBuilder_presize_zero() {
ImmutableLongArray.Builder builder = ImmutableLongArray.builder(0);
builder.add(5L);
ImmutableLongArray array = builder.build();
assertThat(array.asList()).containsExactly(5L);
}
public void testBuilder_presize_negative() {
assertThrows(IllegalArgumentException.class, () -> ImmutableLongArray.builder(-1));
}
public void testBuilder_bruteForce() {
for (int i = 0; i < reduceIterationsIfGwt(100); i++) {
ImmutableLongArray.Builder builder = ImmutableLongArray.builder(random.nextInt(20));
AtomicLong counter = new AtomicLong(0);
while (counter.get() < 1000) {
BuilderOp op = BuilderOp.randomOp();
op.doIt(builder, counter);
}
ImmutableLongArray iia = builder.build();
for (int j = 0; j < iia.length(); j++) {
assertThat(iia.get(j)).isEqualTo((long) j);
}
}
}
private enum BuilderOp {
ADD_ONE {
@Override
void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
builder.add(counter.getAndIncrement());
}
},
ADD_ARRAY {
@Override
void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
long[] array = new long[random.nextInt(10)];
for (int i = 0; i < array.length; i++) {
array[i] = counter.getAndIncrement();
}
builder.addAll(array);
}
},
ADD_COLLECTION {
@Override
void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
List<Long> list = new ArrayList<>();
long num = random.nextInt(10);
for (int i = 0; i < num; i++) {
list.add(counter.getAndIncrement());
}
builder.addAll(list);
}
},
ADD_ITERABLE {
@Override
void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
List<Long> list = new ArrayList<>();
long num = random.nextInt(10);
for (int i = 0; i < num; i++) {
list.add(counter.getAndIncrement());
}
builder.addAll(iterable(list));
}
},
ADD_STREAM {
@Override
void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
long[] array = new long[random.nextInt(10)];
for (int i = 0; i < array.length; i++) {
array[i] = counter.getAndIncrement();
}
builder.addAll(stream(array));
}
},
ADD_IIA {
@Override
void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
long[] array = new long[random.nextInt(10)];
for (int i = 0; i < array.length; i++) {
array[i] = counter.getAndIncrement();
}
builder.addAll(ImmutableLongArray.copyOf(array));
}
},
ADD_LARGER_ARRAY {
@Override
void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
long[] array = new long[random.nextInt(200) + 200];
for (int i = 0; i < array.length; i++) {
array[i] = counter.getAndIncrement();
}
builder.addAll(array);
}
},
;
static final BuilderOp[] values = values();
static BuilderOp randomOp() {
return values[random.nextInt(values.length)];
}
abstract void doIt(ImmutableLongArray.Builder builder, AtomicLong counter);
}
private static final Random random = new Random(42);
public void testLength() {
assertThat(ImmutableLongArray.of().length()).isEqualTo(0);
assertThat(ImmutableLongArray.of(0).length()).isEqualTo(1);
assertThat(ImmutableLongArray.of(0, 1, 3).length()).isEqualTo(3);
assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 1).length()).isEqualTo(0);
assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 2).length()).isEqualTo(1);
}
public void testIsEmpty() {
assertThat(ImmutableLongArray.of().isEmpty()).isTrue();
assertThat(ImmutableLongArray.of(0).isEmpty()).isFalse();
assertThat(ImmutableLongArray.of(0, 1, 3).isEmpty()).isFalse();
assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 1).isEmpty()).isTrue();
assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 2).isEmpty()).isFalse();
}
public void testGet_good() {
ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3);
assertThat(iia.get(0)).isEqualTo(0L);
assertThat(iia.get(2)).isEqualTo(3L);
assertThat(iia.subArray(1, 3).get(1)).isEqualTo(3L);
}
public void testGet_bad() {
ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3);
assertThrows(IndexOutOfBoundsException.class, () -> iia.get(-1));
assertThrows(IndexOutOfBoundsException.class, () -> iia.get(3));
ImmutableLongArray sub = iia.subArray(1, 2);
assertThrows(IndexOutOfBoundsException.class, () -> sub.get(-1));
}
public void testIndexOf() {
ImmutableLongArray iia = ImmutableLongArray.of(1, 1, 2, 3, 5, 8);
assertThat(iia.indexOf(1)).isEqualTo(0);
assertThat(iia.indexOf(8)).isEqualTo(5);
assertThat(iia.indexOf(4)).isEqualTo(-1);
assertThat(ImmutableLongArray.of(13).indexOf(13)).isEqualTo(0);
assertThat(ImmutableLongArray.of().indexOf(21)).isEqualTo(-1);
assertThat(iia.subArray(1, 5).indexOf(1)).isEqualTo(0);
}
public void testLastIndexOf() {
ImmutableLongArray iia = ImmutableLongArray.of(1, 1, 2, 3, 5, 8);
assertThat(iia.lastIndexOf(1)).isEqualTo(1);
assertThat(iia.lastIndexOf(8)).isEqualTo(5);
assertThat(iia.lastIndexOf(4)).isEqualTo(-1);
assertThat(ImmutableLongArray.of(13).lastIndexOf(13)).isEqualTo(0);
assertThat(ImmutableLongArray.of().lastIndexOf(21)).isEqualTo(-1);
assertThat(iia.subArray(1, 5).lastIndexOf(1)).isEqualTo(0);
}
public void testContains() {
ImmutableLongArray iia = ImmutableLongArray.of(1, 1, 2, 3, 5, 8);
assertThat(iia.contains(1)).isTrue();
assertThat(iia.contains(8)).isTrue();
assertThat(iia.contains(4)).isFalse();
assertThat(ImmutableLongArray.of(13).contains(13)).isTrue();
assertThat(ImmutableLongArray.of().contains(21)).isFalse();
assertThat(iia.subArray(1, 5).contains(1)).isTrue();
}
public void testForEach() {
ImmutableLongArray.of().forEach(i -> fail());
ImmutableLongArray.of(0, 1, 3).subArray(1, 1).forEach(i -> fail());
AtomicLong count = new AtomicLong(0);
ImmutableLongArray.of(0, 1, 2, 3)
.forEach(i -> assertThat(i).isEqualTo(count.getAndIncrement()));
assertThat(count.get()).isEqualTo(4);
}
public void testStream() {
ImmutableLongArray.of().stream().forEach(i -> fail());
ImmutableLongArray.of(0, 1, 3).subArray(1, 1).stream().forEach(i -> fail());
assertThat(ImmutableLongArray.of(0, 1, 3).stream().toArray()).isEqualTo(new long[] {0, 1, 3});
}
public void testSubArray() {
ImmutableLongArray iia0 = ImmutableLongArray.of();
ImmutableLongArray iia1 = ImmutableLongArray.of(5);
ImmutableLongArray iia3 = ImmutableLongArray.of(5, 25, 125);
assertThat(iia0.subArray(0, 0)).isSameInstanceAs(ImmutableLongArray.of());
assertThat(iia1.subArray(0, 0)).isSameInstanceAs(ImmutableLongArray.of());
assertThat(iia1.subArray(1, 1)).isSameInstanceAs(ImmutableLongArray.of());
assertThat(iia1.subArray(0, 1).asList()).containsExactly(5L);
assertThat(iia3.subArray(0, 2).asList()).containsExactly(5L, 25L).inOrder();
assertThat(iia3.subArray(1, 3).asList()).containsExactly(25L, 125L).inOrder();
assertThrows(IndexOutOfBoundsException.class, () -> iia3.subArray(-1, 1));
assertThrows(IndexOutOfBoundsException.class, () -> iia3.subArray(1, 4));
}
private static <T> Iterable<T> iterable(final Collection<T> collection) {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return collection.iterator();
}
};
}
public void testEquals() {
new EqualsTester()
.addEqualityGroup(ImmutableLongArray.of())
.addEqualityGroup(
ImmutableLongArray.of(1, 2),
reserialize(ImmutableLongArray.of(1, 2)),
ImmutableLongArray.of(0, 1, 2, 3).subArray(1, 3))
.addEqualityGroup(ImmutableLongArray.of(1, 3))
.addEqualityGroup(ImmutableLongArray.of(1, 2, 3))
.testEquals();
}
public void testTrimmed() {
ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3);
assertDoesntActuallyTrim(iia);
assertDoesntActuallyTrim(iia.subArray(0, 3));
assertActuallyTrims(iia.subArray(0, 2));
assertActuallyTrims(iia.subArray(1, 3));
ImmutableLongArray rightSized = ImmutableLongArray.builder(3).add(0).add(1).add(3).build();
assertDoesntActuallyTrim(rightSized);
ImmutableLongArray overSized = ImmutableLongArray.builder(3).add(0).add(1).build();
assertActuallyTrims(overSized);
ImmutableLongArray underSized = ImmutableLongArray.builder(2).add(0).add(1).add(3).build();
assertActuallyTrims(underSized);
}
@J2ktIncompatible
@GwtIncompatible
public void testSerialization() {
assertThat(reserialize(ImmutableLongArray.of())).isSameInstanceAs(ImmutableLongArray.of());
assertThat(reserialize(ImmutableLongArray.of(0, 1).subArray(1, 1)))
.isSameInstanceAs(ImmutableLongArray.of());
ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3, 6).subArray(1, 3);
ImmutableLongArray iia2 = reserialize(iia);
assertThat(iia2).isEqualTo(iia);
assertDoesntActuallyTrim(iia2);
}
private static void assertActuallyTrims(ImmutableLongArray iia) {
ImmutableLongArray trimmed = iia.trimmed();
assertThat(trimmed).isNotSameInstanceAs(iia);
assertThat(trimmed.toArray()).isEqualTo(iia.toArray());
}
private static void assertDoesntActuallyTrim(ImmutableLongArray iia) {
assertThat(iia.trimmed()).isSameInstanceAs(iia);
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
public static Test suite() {
List<ListTestSuiteBuilder<Long>> builders =
ImmutableList.of(
ListTestSuiteBuilder.using(new ImmutableLongArrayAsListGenerator())
.named("ImmutableLongArray.asList"),
ListTestSuiteBuilder.using(new ImmutableLongArrayHeadSubListAsListGenerator())
.named("ImmutableLongArray.asList, head subList"),
ListTestSuiteBuilder.using(new ImmutableLongArrayTailSubListAsListGenerator())
.named("ImmutableLongArray.asList, tail subList"),
ListTestSuiteBuilder.using(new ImmutableLongArrayMiddleSubListAsListGenerator())
.named("ImmutableLongArray.asList, middle subList"));
TestSuite suite = new TestSuite();
for (ListTestSuiteBuilder<Long> builder : builders) {
suite.addTest(
builder
.withFeatures(
CollectionSize.ZERO,
CollectionSize.ONE,
CollectionSize.SEVERAL,
CollectionFeature.ALLOWS_NULL_QUERIES,
CollectionFeature.RESTRICTS_ELEMENTS,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS)
.createTestSuite());
}
suite.addTestSuite(ImmutableLongArrayTest.class);
return suite;
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
private static ImmutableLongArray makeArray(Long[] values) {
return ImmutableLongArray.copyOf(Arrays.asList(values));
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
public static final class ImmutableLongArrayAsListGenerator extends TestLongListGenerator {
@Override
protected List<Long> create(Long[] elements) {
return makeArray(elements).asList();
}
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
public static final class ImmutableLongArrayHeadSubListAsListGenerator
extends TestLongListGenerator {
@Override
protected List<Long> create(Long[] elements) {
Long[] [MASK] = {Long.MIN_VALUE, Long.MAX_VALUE};
Long[] all = concat(elements, [MASK]);
return makeArray(all).subArray(0, elements.length).asList();
}
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
public static final class ImmutableLongArrayTailSubListAsListGenerator
extends TestLongListGenerator {
@Override
protected List<Long> create(Long[] elements) {
Long[] prefix = {86L, 99L};
Long[] all = concat(prefix, elements);
return makeArray(all).subArray(2, elements.length + 2).asList();
}
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
public static final class ImmutableLongArrayMiddleSubListAsListGenerator
extends TestLongListGenerator {
@Override
protected List<Long> create(Long[] elements) {
Long[] prefix = {Long.MIN_VALUE, Long.MAX_VALUE};
Long[] [MASK] = {86L, 99L};
Long[] all = concat(concat(prefix, elements), [MASK]);
return makeArray(all).subArray(2, elements.length + 2).asList();
}
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
private static Long[] concat(Long[] a, Long[] b) {
return ObjectArrays.concat(a, b, Long.class);
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
public abstract static class TestLongListGenerator implements TestListGenerator<Long> {
@Override
public SampleElements<Long> samples() {
return new SampleLongs();
}
@Override
public List<Long> create(Object... elements) {
Long[] array = new Long[elements.length];
int i = 0;
for (Object e : elements) {
array[i++] = (Long) e;
}
return create(array);
}
protected abstract List<Long> create(Long[] elements);
@Override
public Long[] createArray(int length) {
return new Long[length];
}
@Override
public List<Long> order(List<Long> insertionOrder) {
return insertionOrder;
}
}
@J2ktIncompatible
@GwtIncompatible
@AndroidIncompatible
public static class SampleLongs extends SampleElements<Long> {
public SampleLongs() {
super(1L << 31, 1L << 33, 1L << 36, 1L << 40, 1L << 45);
}
}
} | suffix | java | guava |
package org.elasticsearch.xpack.inference.services.googlevertexai.rerank;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.inference.services.ConfigurationParseContext;
import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiService;
import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject;
import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID;
import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString;
import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString;
import static org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiServiceFields.PROJECT_ID;
public class GoogleVertexAiRerankServiceSettings extends FilteredXContentObject
implements
ServiceSettings,
GoogleDiscoveryEngineRateLimitServiceSettings {
public static final String NAME = "google_vertex_ai_rerank_service_settings";
private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(300);
public static GoogleVertexAiRerankServiceSettings fromMap(Map<String, Object> map, ConfigurationParseContext context) {
ValidationException [MASK] = new ValidationException();
String projectId = extractRequiredString(map, PROJECT_ID, ModelConfigurations.SERVICE_SETTINGS, [MASK]);
String model = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, [MASK]);
RateLimitSettings rateLimitSettings = RateLimitSettings.of(
map,
DEFAULT_RATE_LIMIT_SETTINGS,
[MASK],
GoogleVertexAiService.NAME,
context
);
if ([MASK].validationErrors().isEmpty() == false) {
throw [MASK];
}
return new GoogleVertexAiRerankServiceSettings(projectId, model, rateLimitSettings);
}
private final String projectId;
private final String modelId;
private final RateLimitSettings rateLimitSettings;
public GoogleVertexAiRerankServiceSettings(String projectId, @Nullable String modelId, @Nullable RateLimitSettings rateLimitSettings) {
this.projectId = Objects.requireNonNull(projectId);
this.modelId = modelId;
this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS);
}
public GoogleVertexAiRerankServiceSettings(StreamInput in) throws IOException {
this.projectId = in.readString();
this.modelId = in.readOptionalString();
this.rateLimitSettings = new RateLimitSettings(in);
}
public String projectId() {
return projectId;
}
@Override
public String modelId() {
return modelId;
}
@Override
public RateLimitSettings rateLimitSettings() {
return rateLimitSettings;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersions.V_8_15_0;
}
@Override
protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException {
builder.field(PROJECT_ID, projectId);
if (modelId != null) {
builder.field(MODEL_ID, modelId);
}
rateLimitSettings.toXContent(builder, params);
return builder;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
toXContentFragmentOfExposedFields(builder, params);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(projectId);
out.writeOptionalString(modelId);
rateLimitSettings.writeTo(out);
}
@Override
public boolean equals(Object object) {
if (this == object) return true;
if (object == null || getClass() != object.getClass()) return false;
GoogleVertexAiRerankServiceSettings that = (GoogleVertexAiRerankServiceSettings) object;
return Objects.equals(projectId, that.projectId)
&& Objects.equals(modelId, that.modelId)
&& Objects.equals(rateLimitSettings, that.rateLimitSettings);
}
@Override
public int hashCode() {
return Objects.hash(projectId, modelId, rateLimitSettings);
}
} | validationException | java | elasticsearch |
package org.contacts;
class Contact implements Comparable<Contact> {
private final String name;
private final String [MASK];
public Contact(String name, String [MASK]) {
this.name = name;
this.[MASK] = [MASK];
}
public String getName() {
return name;
}
public String getNumber() {
return [MASK];
}
public int compareTo(Contact o) {
return this.name.compareToIgnoreCase(o.name);
}
} | number | java | j2objc |
package org.elasticsearch.common.bytes;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
import org.elasticsearch.common.util.ByteArray;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.CoreMatchers.equalTo;
public class ReleasableBytesReferenceTests extends AbstractBytesReferenceTestCase {
@Override
protected BytesReference newBytesReference(int length) throws IOException {
return newBytesReferenceWithOffsetOfZero(length);
}
@Override
protected BytesReference newBytesReferenceWithOffsetOfZero(int length) throws IOException {
return newBytesReference(randomByteArrayOfLength(length));
}
@Override
protected BytesReference newBytesReference(byte[] content) throws IOException {
BytesReference delegate;
String composite = "composite";
String paged = "paged";
String array = "array";
String [MASK] = randomFrom(composite, paged, array);
if (array.equals([MASK])) {
final BytesStreamOutput out = new BytesStreamOutput(content.length);
out.writeBytes(content, 0, content.length);
assertThat(content.length, equalTo(out.size()));
BytesArray ref = new BytesArray(out.bytes().toBytesRef().bytes, 0, content.length);
assertThat(content.length, equalTo(ref.length()));
assertThat(ref.length(), Matchers.equalTo(content.length));
delegate = ref;
} else if (paged.equals([MASK])) {
ByteArray byteArray = bigarrays.newByteArray(content.length);
byteArray.set(0, content, 0, content.length);
assertThat(byteArray.size(), Matchers.equalTo((long) content.length));
BytesReference ref = BytesReference.fromByteArray(byteArray, content.length);
assertThat(ref.length(), Matchers.equalTo(content.length));
delegate = ref;
} else {
assert composite.equals([MASK]);
List<BytesReference> referenceList = new ArrayList<>();
for (int i = 0; i < content.length;) {
int remaining = content.length - i;
int sliceLength = randomIntBetween(1, remaining);
ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(sliceLength, bigarrays);
out.writeBytes(content, content.length - remaining, sliceLength);
assertThat(sliceLength, equalTo(out.size()));
referenceList.add(out.bytes());
i += sliceLength;
}
BytesReference ref = CompositeBytesReference.of(referenceList.toArray(new BytesReference[0]));
assertThat(content.length, equalTo(ref.length()));
delegate = ref;
}
return ReleasableBytesReference.wrap(delegate);
}
@Override
public void testToBytesRefSharedPage() throws IOException {
}
@Override
public void testSliceArrayOffset() throws IOException {
}
@Override
public void testSliceToBytesRef() throws IOException {
}
} | type | java | elasticsearch |
package org.openrefine.wikibase.commands;
import static org.mockito.Mockito.when;
import static org.openrefine.wikibase.testing.TestingData.jsonFromFile;
import static org.testng.Assert.assertEquals;
import java.io.IOException;
import java.io.Serializable;
import java.util.Locale;
import javax.servlet.ServletException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.refine.util.LocaleUtils;
import com.google.refine.util.ParsingUtilities;
import org.openrefine.wikibase.testing.TestingData;
import org.openrefine.wikibase.utils.EntityCache;
import org.openrefine.wikibase.utils.EntityCacheStub;
public class PreviewWikibaseSchemaCommandTest extends SchemaCommandTest {
private String localeSetting;
@BeforeMethod
public void SetUp() {
command = new PreviewWikibaseSchemaCommand();
EntityCacheStub entityCacheStub = new EntityCacheStub();
EntityCache.setEntityCache("http:
project = createProject("wiki-warnings-test",
new String[] { "Column 1", "Quebec cultural heritage directory ID" },
new Serializable[][] {
{ "Habitat 67", "98890" }
});
project.rows.get(0).cells.set(0, TestingData.makeMatchedCell("Q1032248", "Habitat 67"));
}
@AfterMethod
public void tearDown() {
EntityCache.removeEntityCache("http:
}
@Test
public void testValidSchema() throws Exception {
String schemaJson = jsonFromFile("schema/inception.json");
String manifestJson = jsonFromFile("manifest/wikidata-manifest-v1.0.json");
when(request.getParameter("schema")).thenReturn(schemaJson);
when(request.getParameter("manifest")).thenReturn(manifestJson);
command.doPost(request, response);
ObjectNode response = ParsingUtilities.evaluateJsonStringToObjectNode(writer.toString());
ArrayNode edits = (ArrayNode) response.get("edits_preview");
assertEquals(edits.size(), 3);
ArrayNode issues = (ArrayNode) response.get("warnings");
assertEquals(issues.size(), 4);
}
@Test
public void testIncompleteSchema() throws IOException, ServletException {
String schemaJson = jsonFromFile("schema/inception_with_errors.json");
String manifestJson = jsonFromFile("manifest/wikidata-manifest-v1.0.json");
when(request.getParameter("schema")).thenReturn(schemaJson);
when(request.getParameter("manifest")).thenReturn(manifestJson);
command.doPost(request, response);
ObjectNode response = ParsingUtilities.evaluateJsonStringToObjectNode(writer.toString());
ArrayNode validationErrors = (ArrayNode) response.get("errors");
assertEquals(validationErrors.size(), 2);
}
@Test
public void testNoManifest() throws IOException, ServletException {
String schemaJson = jsonFromFile("schema/inception.json");
when(request.getParameter("schema")).thenReturn(schemaJson);
command.doPost(request, response);
assertEquals(writer.toString(), "{\"code\":\"error\",\"message\":\"No Wikibase manifest provided.\"}");
}
@Test
public void testInvalidManifest() throws IOException, ServletException {
String schemaJson = jsonFromFile("schema/inception.json");
String manifestJson = "{ invalid manifest";
when(request.getParameter("schema")).thenReturn(schemaJson);
when(request.getParameter("manifest")).thenReturn(manifestJson);
command.doPost(request, response);
assertEquals(writer.toString(),
"{\"code\":\"error\",\"message\":\"Wikibase manifest could not be parsed. Error message: invalid manifest format\"}");
}
@BeforeMethod
public void setLocale() {
localeSetting = Locale.getDefault().getLanguage();
LocaleUtils.setLocale("en");
}
@AfterMethod
public void unsetLocale() {
LocaleUtils.setLocale(localeSetting);
}
@Test
public void testWarningData() throws Exception {
String schemaJson = jsonFromFile("schema/warning_data_test.json");
String manifestJson = jsonFromFile("manifest/wikidata-manifest-v1.0.json");
when(request.getParameter("project")).thenReturn(String.valueOf(project.id));
when(request.getParameter("schema")).thenReturn(schemaJson);
when(request.getParameter("manifest")).thenReturn(manifestJson);
command.doPost(request, response);
ObjectNode response = ParsingUtilities.evaluateJsonStringToObjectNode(writer.toString());
ArrayNode issues = (ArrayNode) response.get("warnings");
boolean existingitemrequirescertainotherstatementwithsuggestedvalue_P633P17 = false;
boolean existingitemrequirescertainotherstatement_P633P18 = false;
for (JsonNode node : issues) {
String aggregationId = node.get("aggregationId").asText();
JsonNode addedPropertyLabel = node.path("properties").path("added_property_entity").path("label");
JsonNode [MASK] = node.path("properties").path("item_entity").path("label");
if (aggregationId.equals("existing-item-requires-property-to-have-certain-values-with-suggested-value_P633P17")) {
assertEquals(addedPropertyLabel.asText(), "country");
assertEquals([MASK].asText(), "Canada");
existingitemrequirescertainotherstatementwithsuggestedvalue_P633P17 = true;
} else if (aggregationId.equals("existing-item-requires-certain-other-statement_P633P18")) {
assertEquals(addedPropertyLabel.asText(), "image");
existingitemrequirescertainotherstatement_P633P18 = true;
}
}
assertEquals(existingitemrequirescertainotherstatementwithsuggestedvalue_P633P17, true);
assertEquals(existingitemrequirescertainotherstatement_P633P18, true);
}
} | itemEntityLabel | java | OpenRefine |
package org.elasticsearch.index.translog;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.index.seqno.SequenceNumbers;
import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
final class TranslogSnapshot extends BaseTranslogReader {
private final int totalOperations;
private final Checkpoint checkpoint;
protected final long length;
private final ByteBuffer reusableBuffer;
private long position;
private int skippedOperations;
private int [MASK];
private BufferedChecksumStreamInput reuse;
TranslogSnapshot(final BaseTranslogReader reader, final long length) {
super(reader.generation, reader.channel, reader.path, reader.header);
this.length = length;
this.totalOperations = reader.totalOperations();
this.checkpoint = reader.getCheckpoint();
this.reusableBuffer = ByteBuffer.allocate(1024);
this.[MASK] = 0;
this.position = reader.getFirstOperationOffset();
this.reuse = null;
}
@Override
public int totalOperations() {
return totalOperations;
}
int skippedOperations() {
return skippedOperations;
}
@Override
Checkpoint getCheckpoint() {
return checkpoint;
}
public Translog.Operation next() throws IOException {
while ([MASK] < totalOperations) {
final Translog.Operation operation = readOperation();
if (operation.seqNo() <= checkpoint.trimmedAboveSeqNo || checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
return operation;
}
skippedOperations++;
}
reuse = null;
return null;
}
private Translog.Operation readOperation() throws IOException {
final int opSize = readSize(reusableBuffer, position);
reuse = checksummedStream(reusableBuffer, position, opSize, reuse);
Translog.Operation op = read(reuse);
position += opSize;
[MASK]++;
return op;
}
public long sizeInBytes() {
return length;
}
protected void readBytes(ByteBuffer buffer, long position) throws IOException {
try {
if (position >= length) {
throw new EOFException(
"read requested past EOF. pos ["
+ position
+ "] end: ["
+ length
+ "], generation: ["
+ getGeneration()
+ "], path: ["
+ path
+ "]"
);
}
if (position < getFirstOperationOffset()) {
throw new IOException(
"read requested before position of first ops. pos ["
+ position
+ "] first op on: ["
+ getFirstOperationOffset()
+ "], generation: ["
+ getGeneration()
+ "], path: ["
+ path
+ "]"
);
}
Channels.readFromFileChannelWithEofException(channel, position, buffer);
} catch (EOFException e) {
throw new TranslogCorruptedException(path.toString(), "translog truncated", e);
}
}
@Override
public String toString() {
return "TranslogSnapshot{"
+ "[MASK]="
+ [MASK]
+ ", position="
+ position
+ ", estimateTotalOperations="
+ totalOperations
+ ", length="
+ length
+ ", generation="
+ generation
+ ", reusableBuffer="
+ reusableBuffer
+ '}';
}
} | readOperations | java | elasticsearch |
package org.mockito.internal.junit;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.mockito.invocation.Invocation;
import org.mockito.plugins.MockitoLogger;
class StubbingArgMismatches {
final Map<Invocation, Set<Invocation>> mismatches = new LinkedHashMap<>();
public void add(Invocation invocation, Invocation [MASK]) {
Set<Invocation> matchingInvocations =
mismatches.computeIfAbsent(
[MASK], (Invocation k) -> new LinkedHashSet<Invocation>());
matchingInvocations.add(invocation);
}
public void format(String testName, MockitoLogger logger) {
if (mismatches.isEmpty()) {
return;
}
StubbingHint hint = new StubbingHint(testName);
int x = 1;
for (Map.Entry<Invocation, Set<Invocation>> m : mismatches.entrySet()) {
hint.appendLine(x++, ". Unused... ", m.getKey().getLocation());
for (Invocation invocation : m.getValue()) {
hint.appendLine(" ...args ok? ", invocation.getLocation());
}
}
logger.log(hint.toString());
}
public int size() {
return mismatches.size();
}
@Override
public String toString() {
return "" + mismatches;
}
} | stubbing | java | mockito |
package org.elasticsearch.xpack.inference.services.validation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.inference.InferenceService;
import org.elasticsearch.inference.Model;
public class SimpleModelValidator implements ModelValidator {
private final ServiceIntegrationValidator [MASK];
public SimpleModelValidator(ServiceIntegrationValidator [MASK]) {
this.[MASK] = [MASK];
}
@Override
public void validate(InferenceService service, Model model, TimeValue timeout, ActionListener<Model> listener) {
[MASK].validate(service, model, timeout, listener.delegateFailureAndWrap((delegate, r) -> {
delegate.onResponse(model);
}));
}
} | serviceIntegrationValidator | java | elasticsearch |
package org.springframework.boot.autoconfigure.transaction;
import org.springframework.transaction.TransactionManager;
public interface TransactionManagerCustomizer<T extends TransactionManager> {
void customize(T [MASK]);
} | transactionManager | java | spring-boot |
package org.springframework.web.reactive.function.server;
import java.io.IOException;
import java.nio.file.Files;
import java.util.List;
import java.util.Set;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.codec.HttpMessageWriter;
import org.springframework.web.reactive.result.view.ViewResolver;
import org.springframework.web.testfixture.http.server.reactive.MockServerHttpRequest;
import org.springframework.web.testfixture.http.server.reactive.MockServerHttpResponse;
import org.springframework.web.testfixture.server.MockServerWebExchange;
import static org.assertj.core.api.Assertions.assertThat;
class ResourceHandlerFunctionTests {
private final Resource resource = new ClassPathResource("response.txt", getClass());
private final ResourceHandlerFunction handlerFunction = new ResourceHandlerFunction(this.resource, (r, h) -> {});
private ServerResponse.Context context;
@BeforeEach
void createContext() {
HandlerStrategies [MASK] = HandlerStrategies.withDefaults();
context = new ServerResponse.Context() {
@Override
public List<HttpMessageWriter<?>> messageWriters() {
return [MASK].messageWriters();
}
@Override
public List<ViewResolver> viewResolvers() {
return [MASK].viewResolvers();
}
};
}
@Test
void get() throws IOException {
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("http:
MockServerHttpResponse mockResponse = exchange.getResponse();
ServerRequest request = new DefaultServerRequest(exchange, HandlerStrategies.withDefaults().messageReaders());
Mono<ServerResponse> responseMono = this.handlerFunction.handle(request);
Mono<Void> result = responseMono.flatMap(response -> {
assertThat(response.statusCode()).isEqualTo(HttpStatus.OK);
boolean condition = response instanceof EntityResponse;
assertThat(condition).isTrue();
@SuppressWarnings("unchecked")
EntityResponse<Resource> entityResponse = (EntityResponse<Resource>) response;
assertThat(entityResponse.entity()).isEqualTo(this.resource);
return response.writeTo(exchange, context);
});
StepVerifier.create(result)
.expectComplete()
.verify();
byte[] expectedBytes = Files.readAllBytes(this.resource.getFile().toPath());
StepVerifier.create(mockResponse.getBody())
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
assertThat(resultBytes).isEqualTo(expectedBytes);
})
.expectComplete()
.verify();
assertThat(mockResponse.getHeaders().getContentType()).isEqualTo(MediaType.TEXT_PLAIN);
assertThat(mockResponse.getHeaders().getContentLength()).isEqualTo(this.resource.contentLength());
}
@Test
void head() throws IOException {
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.head("http:
MockServerHttpResponse mockResponse = exchange.getResponse();
ServerRequest request = new DefaultServerRequest(exchange, HandlerStrategies.withDefaults().messageReaders());
Mono<ServerResponse> responseMono = this.handlerFunction.handle(request);
Mono<Void> result = responseMono.flatMap(response -> {
assertThat(response.statusCode()).isEqualTo(HttpStatus.OK);
boolean condition = response instanceof EntityResponse;
assertThat(condition).isTrue();
@SuppressWarnings("unchecked")
EntityResponse<Resource> entityResponse = (EntityResponse<Resource>) response;
assertThat(entityResponse.entity().getFilename()).isEqualTo(this.resource.getFilename());
return response.writeTo(exchange, context);
});
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(mockResponse.getBody()).expectComplete().verify();
assertThat(mockResponse.getHeaders().getContentType()).isEqualTo(MediaType.TEXT_PLAIN);
assertThat(mockResponse.getHeaders().getContentLength()).isEqualTo(this.resource.contentLength());
}
@Test
void options() {
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.options("http:
MockServerHttpResponse mockResponse = exchange.getResponse();
ServerRequest request = new DefaultServerRequest(exchange, HandlerStrategies.withDefaults().messageReaders());
Mono<ServerResponse> responseMono = this.handlerFunction.handle(request);
Mono<Void> result = responseMono.flatMap(response -> {
assertThat(response.statusCode()).isEqualTo(HttpStatus.OK);
assertThat(response.headers().getAllow()).isEqualTo(Set.of(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.OPTIONS));
return response.writeTo(exchange, context);
});
StepVerifier.create(result)
.expectComplete()
.verify();
assertThat(mockResponse.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(mockResponse.getHeaders().getAllow()).isEqualTo(Set.of(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.OPTIONS));
StepVerifier.create(mockResponse.getBody()).expectComplete().verify();
}
} | strategies | java | spring-framework |
package org.springframework.boot.configurationsample.method;
import org.springframework.boot.configurationsample.ConfigurationProperties;
public class PublicMethodConfig {
@ConfigurationProperties("foo")
public Foo foo() {
return new Foo();
}
public static class Foo {
private String name;
private boolean [MASK];
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public boolean isFlag() {
return this.[MASK];
}
public void setFlag(boolean [MASK]) {
this.[MASK] = [MASK];
}
}
} | flag | java | spring-boot |
package org.elasticsearch.index.search;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.spans.SpanNearQuery;
import org.apache.lucene.queries.spans.SpanOrQuery;
import org.apache.lucene.queries.spans.SpanQuery;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.queryparser.classic.Token;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BoostAttribute;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.query.ExistsQueryBuilder;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.query.ZeroTermsQueryOption;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
import static org.elasticsearch.common.lucene.search.Queries.newLenientFieldQuery;
import static org.elasticsearch.common.lucene.search.Queries.newUnmappedFieldQuery;
import static org.elasticsearch.index.search.QueryParserHelper.checkForTooManyFields;
import static org.elasticsearch.index.search.QueryParserHelper.resolveMappingField;
import static org.elasticsearch.index.search.QueryParserHelper.resolveMappingFields;
public class QueryStringQueryParser extends QueryParser {
private static final String EXISTS_FIELD = "_exists_";
private final SearchExecutionContext context;
private final Map<String, Float> fieldsAndWeights;
private final boolean lenient;
private final MultiMatchQueryParser queryBuilder;
private MultiMatchQueryBuilder.Type type = MultiMatchQueryBuilder.Type.BEST_FIELDS;
private Float groupTieBreaker;
private Analyzer forceAnalyzer;
private Analyzer forceQuoteAnalyzer;
private String quoteFieldSuffix;
private boolean analyzeWildcard;
private ZoneId timeZone;
private Fuzziness fuzziness = Fuzziness.AUTO;
private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions;
private MultiTermQuery.RewriteMethod fuzzyRewriteMethod;
private boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions;
public QueryStringQueryParser(SearchExecutionContext context, String defaultField) {
this(context, defaultField, Collections.emptyMap(), false);
}
public QueryStringQueryParser(SearchExecutionContext context, String defaultField, boolean lenient) {
this(context, defaultField, Collections.emptyMap(), lenient);
}
public QueryStringQueryParser(SearchExecutionContext context, Map<String, Float> fieldsAndWeights) {
this(context, null, fieldsAndWeights, false);
}
public QueryStringQueryParser(SearchExecutionContext context, Map<String, Float> fieldsAndWeights, boolean lenient) {
this(context, null, fieldsAndWeights, lenient);
}
public QueryStringQueryParser(SearchExecutionContext context, boolean lenient) {
this(context, "*", resolveMappingField(context, "*", 1.0f, false, false, null), lenient);
}
private QueryStringQueryParser(
SearchExecutionContext context,
String defaultField,
Map<String, Float> fieldsAndWeights,
boolean lenient
) {
super(defaultField, context.getIndexAnalyzers().getDefaultSearchAnalyzer());
this.context = context;
this.fieldsAndWeights = Collections.unmodifiableMap(fieldsAndWeights);
this.queryBuilder = new MultiMatchQueryParser(context);
queryBuilder.setZeroTermsQuery(ZeroTermsQueryOption.NULL);
queryBuilder.setLenient(lenient);
this.lenient = lenient;
}
@Override
public void setEnablePositionIncrements(boolean enable) {
super.setEnablePositionIncrements(enable);
queryBuilder.setEnablePositionIncrements(enable);
}
@Override
public void setDefaultOperator(Operator op) {
super.setDefaultOperator(op);
queryBuilder.setOccur(op == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD);
}
@Override
public void setPhraseSlop(int phraseSlop) {
super.setPhraseSlop(phraseSlop);
queryBuilder.setPhraseSlop(phraseSlop);
}
public void setType(MultiMatchQueryBuilder.Type type) {
this.type = type;
}
public void setFuzziness(Fuzziness fuzziness) {
this.fuzziness = fuzziness;
}
public void setFuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {
this.fuzzyRewriteMethod = fuzzyRewriteMethod;
}
public void setFuzzyMaxExpansions(int fuzzyMaxExpansions) {
this.fuzzyMaxExpansions = fuzzyMaxExpansions;
}
public void setForceAnalyzer(Analyzer analyzer) {
this.forceAnalyzer = analyzer;
}
public void setForceQuoteAnalyzer(Analyzer analyzer) {
this.forceQuoteAnalyzer = analyzer;
}
public void setQuoteFieldSuffix(String quoteFieldSuffix) {
this.quoteFieldSuffix = quoteFieldSuffix;
}
public void setAnalyzeWildcard(boolean analyzeWildcard) {
this.analyzeWildcard = analyzeWildcard;
}
public void setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
}
public void setGroupTieBreaker(float groupTieBreaker) {
queryBuilder.setTieBreaker(groupTieBreaker);
this.groupTieBreaker = groupTieBreaker;
}
@Override
public void setAutoGenerateMultiTermSynonymsPhraseQuery(boolean enable) {
queryBuilder.setAutoGenerateSynonymsPhraseQuery(enable);
}
public void setFuzzyTranspositions(boolean fuzzyTranspositions) {
this.fuzzyTranspositions = fuzzyTranspositions;
}
private static Query applyBoost(Query q, Float boost) {
if (boost != null && boost != 1f) {
return new BoostQuery(q, boost);
}
return q;
}
private Map<String, Float> extractMultiFields(String field, boolean quoted) {
Map<String, Float> extractedFields;
if (field != null) {
boolean allFields = Regex.isMatchAllPattern(field);
if (allFields && this.field != null && this.field.equals(field)) {
extractedFields = fieldsAndWeights;
} else {
boolean multiFields = Regex.isSimpleMatchPattern(field);
extractedFields = resolveMappingField(
context,
field,
1.0f,
allFields == false,
multiFields == false,
quoted ? quoteFieldSuffix : null
);
}
} else if (quoted && quoteFieldSuffix != null) {
extractedFields = resolveMappingFields(context, fieldsAndWeights, quoteFieldSuffix);
} else {
extractedFields = fieldsAndWeights;
}
checkForTooManyFields(extractedFields.size(), field);
return extractedFields;
}
@Override
protected Query newMatchAllDocsQuery() {
return Queries.newMatchAllQuery();
}
@Override
public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
if (field != null && EXISTS_FIELD.equals(field)) {
return existsQuery(queryText);
}
if (quoted) {
return getFieldQuery(field, queryText, getPhraseSlop());
}
if (field != null) {
if (queryText.length() > 1) {
if (queryText.charAt(0) == '>') {
if (queryText.length() > 2) {
if (queryText.charAt(1) == '=') {
return getRangeQuery(field, queryText.substring(2), null, true, true);
}
}
return getRangeQuery(field, queryText.substring(1), null, false, true);
} else if (queryText.charAt(0) == '<') {
if (queryText.length() > 2) {
if (queryText.charAt(1) == '=') {
return getRangeQuery(field, null, queryText.substring(2), true, true);
}
}
return getRangeQuery(field, null, queryText.substring(1), true, false);
}
if (context.getFieldType(field) instanceof DateFieldType && this.timeZone != null) {
return getRangeQuery(field, queryText, queryText, true, true);
}
}
}
Map<String, Float> fields = extractMultiFields(field, quoted);
if (fields.isEmpty()) {
return newUnmappedFieldQuery(field);
}
Analyzer oldAnalyzer = queryBuilder.analyzer;
try {
if (forceAnalyzer != null) {
queryBuilder.setAnalyzer(forceAnalyzer);
}
return queryBuilder.parse(type, fields, queryText, null);
} catch (IOException e) {
throw new ParseException(e.getMessage());
} finally {
queryBuilder.setAnalyzer(oldAnalyzer);
}
}
@Override
protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException {
if (field != null && EXISTS_FIELD.equals(field)) {
return existsQuery(queryText);
}
Map<String, Float> fields = extractMultiFields(field, true);
if (fields.isEmpty()) {
return newUnmappedFieldQuery(field);
}
Analyzer oldAnalyzer = queryBuilder.analyzer;
int oldSlop = queryBuilder.phraseSlop;
try {
if (forceQuoteAnalyzer != null) {
queryBuilder.setAnalyzer(forceQuoteAnalyzer);
} else if (forceAnalyzer != null) {
queryBuilder.setAnalyzer(forceAnalyzer);
}
queryBuilder.setPhraseSlop(slop);
Query query = queryBuilder.parse(MultiMatchQueryBuilder.Type.PHRASE, fields, queryText, null);
if (query == null) {
return null;
}
return applySlop(query, slop);
} catch (IOException e) {
throw new ParseException(e.getMessage());
} finally {
queryBuilder.setAnalyzer(oldAnalyzer);
queryBuilder.setPhraseSlop(oldSlop);
}
}
@Override
protected Query getRangeQuery(String field, String part1, String part2, boolean startInclusive, boolean endInclusive)
throws ParseException {
if ("*".equals(part1)) {
part1 = null;
}
if ("*".equals(part2)) {
part2 = null;
}
Map<String, Float> fields = extractMultiFields(field, false);
if (fields.isEmpty()) {
return newUnmappedFieldQuery(field);
}
List<Query> queries = new ArrayList<>();
for (Map.Entry<String, Float> entry : fields.entrySet()) {
Query q = getRangeQuerySingle(entry.getKey(), part1, part2, startInclusive, endInclusive, context);
assert q != null;
queries.add(applyBoost(q, entry.getValue()));
}
if (queries.size() == 1) {
return queries.get(0);
}
float tiebreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;
return new DisjunctionMaxQuery(queries, tiebreaker);
}
private Query getRangeQuerySingle(
String field,
String part1,
String part2,
boolean startInclusive,
boolean endInclusive,
SearchExecutionContext context
) {
MappedFieldType currentFieldType = context.getFieldType(field);
if (currentFieldType == null || currentFieldType.getTextSearchInfo() == TextSearchInfo.NONE) {
return newUnmappedFieldQuery(field);
}
try {
Analyzer normalizer = forceAnalyzer == null ? currentFieldType.getTextSearchInfo().searchAnalyzer() : forceAnalyzer;
BytesRef part1Binary = part1 == null ? null : normalizer.normalize(field, part1);
BytesRef part2Binary = part2 == null ? null : normalizer.normalize(field, part2);
Query rangeQuery = currentFieldType.rangeQuery(
part1Binary,
part2Binary,
startInclusive,
endInclusive,
null,
timeZone,
null,
context
);
return rangeQuery;
} catch (RuntimeException e) {
if (lenient) {
return newLenientFieldQuery(field, e);
}
throw e;
}
}
@Override
protected float getFuzzyDistance(Token fuzzyToken, String termStr) {
if (fuzzyToken.image.length() == 1) {
return fuzziness.asDistance(termStr);
}
return Fuzziness.fromString(fuzzyToken.image.substring(1)).asDistance(termStr);
}
@Override
protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) throws ParseException {
Map<String, Float> fields = extractMultiFields(field, false);
if (fields.isEmpty()) {
return newUnmappedFieldQuery(field);
}
List<Query> queries = new ArrayList<>();
for (Map.Entry<String, Float> entry : fields.entrySet()) {
Query q = getFuzzyQuerySingle(entry.getKey(), termStr, (int) minSimilarity);
assert q != null;
queries.add(applyBoost(q, entry.getValue()));
}
if (queries.size() == 1) {
return queries.get(0);
} else {
float tiebreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;
return new DisjunctionMaxQuery(queries, tiebreaker);
}
}
private Query getFuzzyQuerySingle(String field, String termStr, int minSimilarity) throws ParseException {
MappedFieldType currentFieldType = context.getFieldType(field);
if (currentFieldType == null || currentFieldType.getTextSearchInfo() == TextSearchInfo.NONE) {
return newUnmappedFieldQuery(field);
}
try {
Analyzer normalizer = forceAnalyzer == null ? currentFieldType.getTextSearchInfo().searchAnalyzer() : forceAnalyzer;
BytesRef term = termStr == null ? null : normalizer.normalize(field, termStr);
return currentFieldType.fuzzyQuery(
term,
Fuzziness.fromEdits(minSimilarity),
getFuzzyPrefixLength(),
fuzzyMaxExpansions,
fuzzyTranspositions,
context,
null
);
} catch (RuntimeException e) {
if (lenient) {
return newLenientFieldQuery(field, e);
}
throw e;
}
}
@Override
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
int numEdits = Fuzziness.fromEdits((int) minimumSimilarity).asDistance(term.text());
return fuzzyRewriteMethod == null
? new FuzzyQuery(term, numEdits, prefixLength, fuzzyMaxExpansions, fuzzyTranspositions)
: new FuzzyQuery(term, numEdits, prefixLength, fuzzyMaxExpansions, fuzzyTranspositions, fuzzyRewriteMethod);
}
@Override
protected Query getPrefixQuery(String field, String termStr) throws ParseException {
Map<String, Float> fields = extractMultiFields(field, false);
if (fields.isEmpty()) {
return newUnmappedFieldQuery(termStr);
}
List<Query> queries = new ArrayList<>();
for (Map.Entry<String, Float> entry : fields.entrySet()) {
Query q = getPrefixQuerySingle(entry.getKey(), termStr);
if (q != null) {
queries.add(applyBoost(q, entry.getValue()));
}
}
if (queries.isEmpty()) {
return null;
} else if (queries.size() == 1) {
return queries.get(0);
} else {
float tiebreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;
return new DisjunctionMaxQuery(queries, tiebreaker);
}
}
private Query getPrefixQuerySingle(String field, String termStr) throws ParseException {
Analyzer oldAnalyzer = getAnalyzer();
try {
MappedFieldType currentFieldType = context.getFieldType(field);
if (currentFieldType == null || currentFieldType.getTextSearchInfo() == TextSearchInfo.NONE) {
return newUnmappedFieldQuery(field);
}
setAnalyzer(forceAnalyzer == null ? currentFieldType.getTextSearchInfo().searchAnalyzer() : forceAnalyzer);
Query query = null;
if (currentFieldType.getTextSearchInfo().isTokenized() == false) {
query = currentFieldType.prefixQuery(termStr, getMultiTermRewriteMethod(), context);
} else {
query = getPossiblyAnalyzedPrefixQuery(currentFieldType.name(), termStr, currentFieldType);
}
return query;
} catch (RuntimeException e) {
if (lenient) {
return newLenientFieldQuery(field, e);
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, MappedFieldType currentFieldType) throws ParseException {
if (analyzeWildcard == false) {
return currentFieldType.prefixQuery(
getAnalyzer().normalize(field, termStr).utf8ToString(),
getMultiTermRewriteMethod(),
context
);
}
List<List<String>> tlist;
TokenStream source = null;
try {
try {
source = getAnalyzer().tokenStream(field, termStr);
source.reset();
} catch (IOException e) {
return super.getPrefixQuery(field, termStr);
}
tlist = new ArrayList<>();
List<String> currentPos = new ArrayList<>();
CharTermAttribute [MASK] = source.addAttribute(CharTermAttribute.class);
PositionIncrementAttribute posAtt = source.addAttribute(PositionIncrementAttribute.class);
while (true) {
try {
if (source.incrementToken() == false) {
break;
}
} catch (IOException e) {
break;
}
if (currentPos.isEmpty() == false && posAtt.getPositionIncrement() > 0) {
tlist.add(currentPos);
currentPos = new ArrayList<>();
}
currentPos.add([MASK].toString());
}
if (currentPos.isEmpty() == false) {
tlist.add(currentPos);
}
} finally {
if (source != null) {
IOUtils.closeWhileHandlingException(source);
}
}
if (tlist.size() == 0) {
return null;
}
if (tlist.size() == 1 && tlist.get(0).size() == 1) {
return currentFieldType.prefixQuery(tlist.get(0).get(0), getMultiTermRewriteMethod(), context);
}
List<BooleanClause> clauses = new ArrayList<>();
for (int pos = 0; pos < tlist.size(); pos++) {
List<String> plist = tlist.get(pos);
boolean isLastPos = (pos == tlist.size() - 1);
Query posQuery;
if (plist.size() == 1) {
if (isLastPos) {
posQuery = currentFieldType.prefixQuery(plist.get(0), getMultiTermRewriteMethod(), context);
} else {
posQuery = newTermQuery(new Term(field, plist.get(0)), BoostAttribute.DEFAULT_BOOST);
}
} else if (isLastPos == false) {
SynonymQuery.Builder sb = new SynonymQuery.Builder(field);
for (String synonym : plist) {
sb.addTerm(new Term(field, synonym));
}
posQuery = sb.build();
} else {
List<BooleanClause> innerClauses = new ArrayList<>();
for (String token : plist) {
innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
}
posQuery = getBooleanQuery(innerClauses);
}
clauses.add(
new BooleanClause(posQuery, getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD)
);
}
return getBooleanQuery(clauses);
}
private Query existsQuery(String fieldName) {
if (context.isFieldMapped(FieldNamesFieldMapper.NAME) == false) {
return new MatchNoDocsQuery("No mappings yet");
}
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context
.getFieldType(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType.isEnabled() == false) {
return new WildcardQuery(new Term(fieldName, "*"));
}
return ExistsQueryBuilder.newFilter(context, fieldName, false);
}
@Override
protected Query getWildcardQuery(String field, String termStr) throws ParseException {
String actualField = field != null ? field : this.field;
if (termStr.equals("*") && actualField != null) {
if (Regex.isMatchAllPattern(actualField)) {
return newMatchAllDocsQuery();
}
return existsQuery(actualField);
}
Map<String, Float> fields = extractMultiFields(field, false);
if (fields.isEmpty()) {
return newUnmappedFieldQuery(termStr);
}
List<Query> queries = new ArrayList<>();
for (Map.Entry<String, Float> entry : fields.entrySet()) {
Query q = getWildcardQuerySingle(entry.getKey(), termStr);
assert q != null;
queries.add(applyBoost(q, entry.getValue()));
}
if (queries.size() == 1) {
return queries.get(0);
} else {
float tiebreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;
return new DisjunctionMaxQuery(queries, tiebreaker);
}
}
private Query getWildcardQuerySingle(String field, String termStr) throws ParseException {
if ("*".equals(termStr)) {
return existsQuery(field);
}
Analyzer oldAnalyzer = getAnalyzer();
try {
MappedFieldType currentFieldType = queryBuilder.context.getFieldType(field);
if (currentFieldType == null) {
return newUnmappedFieldQuery(field);
}
if (forceAnalyzer != null && (analyzeWildcard || currentFieldType.getTextSearchInfo().isTokenized())) {
setAnalyzer(forceAnalyzer);
return super.getWildcardQuery(currentFieldType.name(), termStr);
}
if (getAllowLeadingWildcard() == false && (termStr.startsWith("*") || termStr.startsWith("?"))) {
throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery");
}
return currentFieldType.normalizedWildcardQuery(termStr, getMultiTermRewriteMethod(), context);
} catch (RuntimeException e) {
if (lenient) {
return newLenientFieldQuery(field, e);
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
@Override
protected Query getRegexpQuery(String field, String termStr) throws ParseException {
final int maxAllowedRegexLength = context.getIndexSettings().getMaxRegexLength();
if (termStr.length() > maxAllowedRegexLength) {
throw new IllegalArgumentException(
"The length of regex ["
+ termStr.length()
+ "] used in the [query_string] has exceeded "
+ "the allowed maximum of ["
+ maxAllowedRegexLength
+ "]. This maximum can be set by changing the ["
+ IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey()
+ "] index level setting."
);
}
Map<String, Float> fields = extractMultiFields(field, false);
if (fields.isEmpty()) {
return newUnmappedFieldQuery(termStr);
}
List<Query> queries = new ArrayList<>();
for (Map.Entry<String, Float> entry : fields.entrySet()) {
Query q = getRegexpQuerySingle(entry.getKey(), termStr);
assert q != null;
queries.add(applyBoost(q, entry.getValue()));
}
if (queries.size() == 1) {
return queries.get(0);
} else {
float tiebreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker;
return new DisjunctionMaxQuery(queries, tiebreaker);
}
}
private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
Analyzer oldAnalyzer = getAnalyzer();
try {
MappedFieldType currentFieldType = queryBuilder.context.getFieldType(field);
if (currentFieldType == null) {
return newUnmappedFieldQuery(field);
}
if (forceAnalyzer != null) {
setAnalyzer(forceAnalyzer);
return super.getRegexpQuery(field, termStr);
}
return currentFieldType.regexpQuery(
termStr,
RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT,
0,
getDeterminizeWorkLimit(),
getMultiTermRewriteMethod(),
context
);
} catch (RuntimeException e) {
if (lenient) {
return newLenientFieldQuery(field, e);
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
@Override
protected Query getBooleanQuery(List<BooleanClause> clauses) throws ParseException {
Query q = super.getBooleanQuery(clauses);
if (q == null) {
return null;
}
return fixNegativeQueryIfNeeded(q);
}
private static Query applySlop(Query q, int slop) {
if (q instanceof PhraseQuery) {
assert q instanceof BoostQuery == false;
return addSlopToPhrase((PhraseQuery) q, slop);
} else if (q instanceof MultiPhraseQuery) {
MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder((MultiPhraseQuery) q);
builder.setSlop(slop);
return builder.build();
} else if (q instanceof SpanQuery) {
return addSlopToSpan((SpanQuery) q, slop);
} else {
return q;
}
}
private static Query addSlopToSpan(SpanQuery query, int slop) {
if (query instanceof SpanNearQuery) {
return new SpanNearQuery(((SpanNearQuery) query).getClauses(), slop, ((SpanNearQuery) query).isInOrder());
} else if (query instanceof SpanOrQuery) {
SpanQuery[] clauses = new SpanQuery[((SpanOrQuery) query).getClauses().length];
int pos = 0;
for (SpanQuery clause : ((SpanOrQuery) query).getClauses()) {
clauses[pos++] = (SpanQuery) addSlopToSpan(clause, slop);
}
return new SpanOrQuery(clauses);
} else {
return query;
}
}
private static PhraseQuery addSlopToPhrase(PhraseQuery query, int slop) {
PhraseQuery.Builder builder = new PhraseQuery.Builder();
builder.setSlop(slop);
final Term[] terms = query.getTerms();
final int[] positions = query.getPositions();
for (int i = 0; i < terms.length; ++i) {
builder.add(terms[i], positions[i]);
}
return builder.build();
}
@Override
public Query parse(String query) throws ParseException {
if (query.trim().isEmpty()) {
return Queries.newMatchNoDocsQuery("Matching no documents because no terms present");
}
return super.parse(query);
}
} | termAtt | java | elasticsearch |
package org.springframework.format.datetime.standard;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.MonthDay;
import java.time.OffsetDateTime;
import java.time.OffsetTime;
import java.time.YearMonth;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.springframework.context.support.EmbeddedValueResolutionSupport;
import org.springframework.format.AnnotationFormatterFactory;
import org.springframework.format.Parser;
import org.springframework.format.Printer;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.util.StringUtils;
public class Jsr310DateTimeFormatAnnotationFormatterFactory extends EmbeddedValueResolutionSupport
implements AnnotationFormatterFactory<DateTimeFormat> {
private static final Set<Class<?>> FIELD_TYPES = Set.of(
Instant.class,
LocalDate.class,
LocalTime.class,
LocalDateTime.class,
ZonedDateTime.class,
OffsetDateTime.class,
OffsetTime.class,
YearMonth.class,
MonthDay.class);
@Override
public final Set<Class<?>> getFieldTypes() {
return FIELD_TYPES;
}
@Override
public Printer<?> getPrinter(DateTimeFormat annotation, Class<?> fieldType) {
DateTimeFormatter formatter = getFormatter(annotation, fieldType);
if (formatter == DateTimeFormatter.ISO_DATE) {
if (isLocal(fieldType)) {
formatter = DateTimeFormatter.ISO_LOCAL_DATE;
}
}
else if (formatter == DateTimeFormatter.ISO_TIME) {
if (isLocal(fieldType)) {
formatter = DateTimeFormatter.ISO_LOCAL_TIME;
}
}
else if (formatter == DateTimeFormatter.ISO_DATE_TIME) {
if (isLocal(fieldType)) {
formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
}
}
return new TemporalAccessorPrinter(formatter);
}
@Override
@SuppressWarnings("unchecked")
public Parser<?> getParser(DateTimeFormat annotation, Class<?> fieldType) {
DateTimeFormatter formatter = getFormatter(annotation, fieldType);
List<String> resolvedFallbackPatterns = new ArrayList<>();
for (String fallbackPattern : annotation.fallbackPatterns()) {
String [MASK] = resolveEmbeddedValue(fallbackPattern);
if (StringUtils.hasLength([MASK])) {
resolvedFallbackPatterns.add([MASK]);
}
}
return new TemporalAccessorParser((Class<? extends TemporalAccessor>) fieldType,
formatter, resolvedFallbackPatterns.toArray(new String[0]), annotation);
}
protected DateTimeFormatter getFormatter(DateTimeFormat annotation, Class<?> fieldType) {
DateTimeFormatterFactory factory = new DateTimeFormatterFactory();
String style = resolveEmbeddedValue(annotation.style());
if (StringUtils.hasLength(style)) {
factory.setStylePattern(style);
}
factory.setIso(annotation.iso());
String pattern = resolveEmbeddedValue(annotation.pattern());
if (StringUtils.hasLength(pattern)) {
factory.setPattern(pattern);
}
return factory.createDateTimeFormatter();
}
private boolean isLocal(Class<?> fieldType) {
return fieldType.getSimpleName().startsWith("Local");
}
} | resolvedFallbackPattern | java | spring-framework |
package smoketest.websocket.jetty.snake;
import java.awt.Color;
import java.util.Random;
public final class SnakeUtils {
public static final int PLAYFIELD_WIDTH = 640;
public static final int PLAYFIELD_HEIGHT = 480;
public static final int GRID_SIZE = 10;
private static final Random [MASK] = new Random();
private SnakeUtils() {
}
public static String getRandomHexColor() {
float hue = [MASK].nextFloat();
float saturation = ([MASK].nextInt(2000) + 1000) / 10000f;
float luminance = 0.9f;
Color color = Color.getHSBColor(hue, saturation, luminance);
return '#' + Integer.toHexString((color.getRGB() & 0xffffff) | 0x1000000).substring(1);
}
public static Location getRandomLocation() {
int x = roundByGridSize([MASK].nextInt(PLAYFIELD_WIDTH));
int y = roundByGridSize([MASK].nextInt(PLAYFIELD_HEIGHT));
return new Location(x, y);
}
private static int roundByGridSize(int value) {
value = value + (GRID_SIZE / 2);
value = value / GRID_SIZE;
value = value * GRID_SIZE;
return value;
}
} | random | java | spring-boot |
package org.elasticsearch.h3;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.geo.Point;
import org.apache.lucene.spatial3d.geom.GeoPoint;
import org.apache.lucene.spatial3d.geom.GeoPolygon;
import org.apache.lucene.spatial3d.geom.GeoPolygonFactory;
import org.apache.lucene.spatial3d.geom.PlanetModel;
import org.apache.lucene.tests.geo.GeoTestUtil;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
public class ParentChildNavigationTests extends ESTestCase {
public void testChildrenSize() {
Point point = GeoTestUtil.nextPoint();
int res = randomInt(H3.MAX_H3_RES - 1);
String h3Address = H3.geoToH3Address(point.getLat(), point.getLon(), res);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> H3.h3ToChildrenSize(h3Address, res));
assertThat(ex.getMessage(), Matchers.containsString("Invalid child resolution"));
ex = expectThrows(IllegalArgumentException.class, () -> H3.h3ToChildrenSize(h3Address, H3.MAX_H3_RES + 1));
assertThat(ex.getMessage(), Matchers.containsString("Invalid child resolution"));
ex = expectThrows(
IllegalArgumentException.class,
() -> H3.h3ToChildrenSize(H3.geoToH3(point.getLat(), point.getLon(), H3.MAX_H3_RES))
);
assertThat(ex.getMessage(), Matchers.containsString("Invalid child resolution"));
assertEquals(H3.h3ToChildrenSize(h3Address), H3.h3ToChildrenSize(h3Address, res + 1));
int childrenRes = Math.min(H3.MAX_H3_RES, res + randomIntBetween(2, 7));
long numChildren = H3.h3ToChildrenSize(h3Address, childrenRes);
assertEquals(numChildren(h3Address, childrenRes), numChildren);
}
private long numChildren(String h3Address, int finalRes) {
if (H3.getResolution(h3Address) == finalRes) {
return 1;
}
long result = 0;
for (int i = 0; i < H3.h3ToChildrenSize(h3Address); i++) {
result += numChildren(H3.childPosToH3(h3Address, i), finalRes);
}
return result;
}
public void testNoChildrenIntersectingSize() {
Point point = GeoTestUtil.nextPoint();
int res = randomInt(H3.MAX_H3_RES - 1);
String h3Address = H3.geoToH3Address(point.getLat(), point.getLon(), res);
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> H3.h3ToNotIntersectingChildrenSize(H3.geoToH3(point.getLat(), point.getLon(), H3.MAX_H3_RES))
);
assertThat(ex.getMessage(), Matchers.containsString("Invalid child resolution"));
long numChildren = H3.h3ToNotIntersectingChildrenSize(h3Address);
assertEquals(H3.h3ToNoChildrenIntersecting(h3Address).length, numChildren);
}
public void testParentChild() {
String[] h3Addresses = H3.getStringRes0Cells();
String h3Address = RandomPicks.randomFrom(random(), h3Addresses);
String[] values = new String[H3.MAX_H3_RES];
values[0] = h3Address;
for (int i = 1; i < H3.MAX_H3_RES; i++) {
h3Addresses = H3.h3ToChildren(h3Address);
Set<String> mySet = Sets.newHashSet(h3Addresses);
assertEquals(mySet.size(), h3Addresses.length);
h3Address = RandomPicks.randomFrom(random(), h3Addresses);
values[i] = h3Address;
}
h3Addresses = H3.h3ToChildren(h3Address);
h3Address = RandomPicks.randomFrom(random(), h3Addresses);
for (int i = H3.MAX_H3_RES - 1; i >= 0; i--) {
h3Address = H3.h3ToParent(h3Address);
assertEquals(values[i], h3Address);
}
}
public void testHexRing() {
String[] h3Addresses = H3.getStringRes0Cells();
for (int i = 1; i < H3.MAX_H3_RES; i++) {
String h3Address = RandomPicks.randomFrom(random(), h3Addresses);
assertEquals(i - 1, H3.getResolution(h3Address));
h3Addresses = H3.h3ToChildren(h3Address);
assertHexRing(i, h3Address, h3Addresses);
}
}
private static final int[] HEX_RING_POSITIONS = new int[] { 2, 0, 1, 4, 3, 5 };
private static final int[] PENT_RING_POSITIONS = new int[] { 0, 1, 3, 2, 4 };
private void assertHexRing(int res, String h3Address, String[] children) {
LatLng latLng = H3.h3ToLatLng(h3Address);
String centerChild = H3.geoToH3Address(latLng.getLatDeg(), latLng.getLonDeg(), res);
assertEquals(children[0], centerChild);
String[] ring = H3.hexRing(centerChild);
int[] positions = H3.isPentagon(centerChild) ? PENT_RING_POSITIONS : HEX_RING_POSITIONS;
for (int i = 1; i < children.length; i++) {
assertEquals(children[i], ring[positions[i - 1]]);
}
}
public void testNoChildrenIntersecting() {
String[] h3Addresses = H3.getStringRes0Cells();
String h3Address = RandomPicks.randomFrom(random(), h3Addresses);
for (int i = 1; i <= H3.MAX_H3_RES; i++) {
h3Addresses = H3.h3ToChildren(h3Address);
assertIntersectingChildren(h3Address, h3Addresses);
h3Address = RandomPicks.randomFrom(random(), h3Addresses);
}
}
private void assertIntersectingChildren(String h3Address, String[] children) {
int size = H3.h3ToNotIntersectingChildrenSize(h3Address);
for (int i = 0; i < size; i++) {
GeoPolygon p = getGeoPolygon(H3.noChildIntersectingPosToH3(h3Address, i));
int [MASK] = 0;
for (String o : children) {
if (p.intersects(getGeoPolygon(o))) {
[MASK]++;
}
}
assertEquals(2, [MASK]);
}
}
private GeoPolygon getGeoPolygon(String h3Address) {
CellBoundary cellBoundary = H3.h3ToGeoBoundary(h3Address);
List<GeoPoint> points = new ArrayList<>(cellBoundary.numPoints());
for (int i = 0; i < cellBoundary.numPoints(); i++) {
LatLng latLng = cellBoundary.getLatLon(i);
points.add(new GeoPoint(PlanetModel.SPHERE, latLng.getLatRad(), latLng.getLonRad()));
}
return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points);
}
public void testHexRingPos() {
String[] h3Addresses = H3.getStringRes0Cells();
for (int i = 0; i < H3.MAX_H3_RES; i++) {
String h3Address = RandomPicks.randomFrom(random(), h3Addresses);
assertHexRing3(h3Address);
h3Addresses = H3.h3ToChildren(h3Address);
}
}
private void assertHexRing3(String h3Address) {
String[] ring = H3.hexRing(h3Address);
assertEquals(ring.length, H3.hexRingSize(h3Address));
for (int i = 0; i < H3.hexRingSize(h3Address); i++) {
assertEquals(ring[i], H3.hexRingPosToH3(h3Address, i));
}
}
} | intersections | java | elasticsearch |
package com.google.inject.spi;
import com.google.inject.Binder;
import com.google.inject.TypeLiteral;
import com.google.inject.matcher.Matcher;
public final class TypeListenerBinding implements Element {
private final Object source;
private final Matcher<? super TypeLiteral<?>> [MASK];
private final TypeListener listener;
TypeListenerBinding(
Object source, TypeListener listener, Matcher<? super TypeLiteral<?>> [MASK]) {
this.source = source;
this.listener = listener;
this.[MASK] = [MASK];
}
public TypeListener getListener() {
return listener;
}
public Matcher<? super TypeLiteral<?>> getTypeMatcher() {
return [MASK];
}
@Override
public Object getSource() {
return source;
}
@Override
public <T> T acceptVisitor(ElementVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
public void applyTo(Binder binder) {
binder.withSource(getSource()).bindListener([MASK], listener);
}
} | typeMatcher | java | guice |
package java.util.concurrent.locks;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.AbstractQueuedSynchronizer.Node;
public abstract class AbstractQueuedLongSynchronizer
extends AbstractOwnableSynchronizer
implements java.io.Serializable {
private static final long serialVersionUID = 7373984972572414692L;
protected AbstractQueuedLongSynchronizer() { }
private transient volatile Node head;
private transient volatile Node tail;
private volatile long state;
protected final long getState() {
return state;
}
protected final void setState(long newState) {
U.putLongVolatile(this, STATE, newState);
}
protected final boolean compareAndSetState(long expect, long update) {
return U.compareAndSwapLong(this, STATE, expect, update);
}
static final long SPIN_FOR_TIMEOUT_THRESHOLD = 1000L;
private Node enq(Node node) {
for (;;) {
Node oldTail = tail;
if (oldTail != null) {
U.putObject(node, Node.PREV, oldTail);
if (compareAndSetTail(oldTail, node)) {
oldTail.next = node;
return oldTail;
}
} else {
initializeSyncQueue();
}
}
}
private Node addWaiter(Node mode) {
Node node = new Node(mode);
for (;;) {
Node oldTail = tail;
if (oldTail != null) {
U.putObject(node, Node.PREV, oldTail);
if (compareAndSetTail(oldTail, node)) {
oldTail.next = node;
return node;
}
} else {
initializeSyncQueue();
}
}
}
private void setHead(Node node) {
head = node;
node.thread = null;
node.prev = null;
}
private void unparkSuccessor(Node node) {
int ws = node.waitStatus;
if (ws < 0)
node.compareAndSetWaitStatus(ws, 0);
Node s = node.next;
if (s == null || s.waitStatus > 0) {
s = null;
for (Node p = tail; p != node && p != null; p = p.prev)
if (p.waitStatus <= 0)
s = p;
}
if (s != null)
LockSupport.unpark(s.thread);
}
private void doReleaseShared() {
for (;;) {
Node h = head;
if (h != null && h != tail) {
int ws = h.waitStatus;
if (ws == Node.SIGNAL) {
if (!h.compareAndSetWaitStatus(Node.SIGNAL, 0))
continue;
unparkSuccessor(h);
}
else if (ws == 0 &&
!h.compareAndSetWaitStatus(0, Node.PROPAGATE))
continue;
}
if (h == head)
break;
}
}
private void setHeadAndPropagate(Node node, long propagate) {
Node h = head;
setHead(node);
if (propagate > 0 || h == null || h.waitStatus < 0 ||
(h = head) == null || h.waitStatus < 0) {
Node s = node.next;
if (s == null || s.isShared())
doReleaseShared();
}
}
private void cancelAcquire(Node node) {
if (node == null)
return;
node.thread = null;
Node pred = node.prev;
while (pred.waitStatus > 0)
node.prev = pred = pred.prev;
Node predNext = pred.next;
node.waitStatus = Node.CANCELLED;
if (node == tail && compareAndSetTail(node, pred)) {
pred.compareAndSetNext(predNext, null);
} else {
int ws;
if (pred != head &&
((ws = pred.waitStatus) == Node.SIGNAL ||
(ws <= 0 && pred.compareAndSetWaitStatus(ws, Node.SIGNAL))) &&
pred.thread != null) {
Node next = node.next;
if (next != null && next.waitStatus <= 0)
pred.compareAndSetNext(predNext, next);
} else {
unparkSuccessor(node);
}
node.next = node;
}
}
private static boolean shouldParkAfterFailedAcquire(Node pred, Node node) {
int ws = pred.waitStatus;
if (ws == Node.SIGNAL)
return true;
if (ws > 0) {
do {
node.prev = pred = pred.prev;
} while (pred.waitStatus > 0);
pred.next = node;
} else {
pred.compareAndSetWaitStatus(ws, Node.SIGNAL);
}
return false;
}
static void selfInterrupt() {
Thread.currentThread().interrupt();
}
private final boolean parkAndCheckInterrupt() {
LockSupport.park(this);
return Thread.interrupted();
}
final boolean acquireQueued(final Node node, long arg) {
boolean interrupted = false;
try {
for (;;) {
final Node p = node.predecessor();
if (p == head && tryAcquire(arg)) {
setHead(node);
p.next = null;
return interrupted;
}
if (shouldParkAfterFailedAcquire(p, node))
interrupted |= parkAndCheckInterrupt();
}
} catch (Throwable t) {
cancelAcquire(node);
if (interrupted)
selfInterrupt();
throw t;
}
}
private void doAcquireInterruptibly(long arg)
throws InterruptedException {
final Node node = addWaiter(Node.EXCLUSIVE);
try {
for (;;) {
final Node p = node.predecessor();
if (p == head && tryAcquire(arg)) {
setHead(node);
p.next = null;
return;
}
if (shouldParkAfterFailedAcquire(p, node) &&
parkAndCheckInterrupt())
throw new InterruptedException();
}
} catch (Throwable t) {
cancelAcquire(node);
throw t;
}
}
private boolean doAcquireNanos(long arg, long nanosTimeout)
throws InterruptedException {
if (nanosTimeout <= 0L)
return false;
final long deadline = System.nanoTime() + nanosTimeout;
final Node node = addWaiter(Node.EXCLUSIVE);
try {
for (;;) {
final Node p = node.predecessor();
if (p == head && tryAcquire(arg)) {
setHead(node);
p.next = null;
return true;
}
nanosTimeout = deadline - System.nanoTime();
if (nanosTimeout <= 0L) {
cancelAcquire(node);
return false;
}
if (shouldParkAfterFailedAcquire(p, node) &&
nanosTimeout > SPIN_FOR_TIMEOUT_THRESHOLD)
LockSupport.parkNanos(this, nanosTimeout);
if (Thread.interrupted())
throw new InterruptedException();
}
} catch (Throwable t) {
cancelAcquire(node);
throw t;
}
}
private void doAcquireShared(long arg) {
final Node node = addWaiter(Node.SHARED);
boolean interrupted = false;
try {
for (;;) {
final Node p = node.predecessor();
if (p == head) {
long r = tryAcquireShared(arg);
if (r >= 0) {
setHeadAndPropagate(node, r);
p.next = null;
return;
}
}
if (shouldParkAfterFailedAcquire(p, node))
interrupted |= parkAndCheckInterrupt();
}
} catch (Throwable t) {
cancelAcquire(node);
throw t;
} finally {
if (interrupted)
selfInterrupt();
}
}
private void doAcquireSharedInterruptibly(long arg)
throws InterruptedException {
final Node node = addWaiter(Node.SHARED);
try {
for (;;) {
final Node p = node.predecessor();
if (p == head) {
long r = tryAcquireShared(arg);
if (r >= 0) {
setHeadAndPropagate(node, r);
p.next = null;
return;
}
}
if (shouldParkAfterFailedAcquire(p, node) &&
parkAndCheckInterrupt())
throw new InterruptedException();
}
} catch (Throwable t) {
cancelAcquire(node);
throw t;
}
}
private boolean doAcquireSharedNanos(long arg, long nanosTimeout)
throws InterruptedException {
if (nanosTimeout <= 0L)
return false;
final long deadline = System.nanoTime() + nanosTimeout;
final Node node = addWaiter(Node.SHARED);
try {
for (;;) {
final Node p = node.predecessor();
if (p == head) {
long r = tryAcquireShared(arg);
if (r >= 0) {
setHeadAndPropagate(node, r);
p.next = null;
return true;
}
}
nanosTimeout = deadline - System.nanoTime();
if (nanosTimeout <= 0L) {
cancelAcquire(node);
return false;
}
if (shouldParkAfterFailedAcquire(p, node) &&
nanosTimeout > SPIN_FOR_TIMEOUT_THRESHOLD)
LockSupport.parkNanos(this, nanosTimeout);
if (Thread.interrupted())
throw new InterruptedException();
}
} catch (Throwable t) {
cancelAcquire(node);
throw t;
}
}
protected boolean tryAcquire(long arg) {
throw new UnsupportedOperationException();
}
protected boolean tryRelease(long arg) {
throw new UnsupportedOperationException();
}
protected long tryAcquireShared(long arg) {
throw new UnsupportedOperationException();
}
protected boolean tryReleaseShared(long arg) {
throw new UnsupportedOperationException();
}
protected boolean isHeldExclusively() {
throw new UnsupportedOperationException();
}
public final void acquire(long arg) {
if (!tryAcquire(arg) &&
acquireQueued(addWaiter(Node.EXCLUSIVE), arg))
selfInterrupt();
}
public final void acquireInterruptibly(long arg)
throws InterruptedException {
if (Thread.interrupted())
throw new InterruptedException();
if (!tryAcquire(arg))
doAcquireInterruptibly(arg);
}
public final boolean tryAcquireNanos(long arg, long nanosTimeout)
throws InterruptedException {
if (Thread.interrupted())
throw new InterruptedException();
return tryAcquire(arg) ||
doAcquireNanos(arg, nanosTimeout);
}
public final boolean release(long arg) {
if (tryRelease(arg)) {
Node h = head;
if (h != null && h.waitStatus != 0)
unparkSuccessor(h);
return true;
}
return false;
}
public final void acquireShared(long arg) {
if (tryAcquireShared(arg) < 0)
doAcquireShared(arg);
}
public final void acquireSharedInterruptibly(long arg)
throws InterruptedException {
if (Thread.interrupted())
throw new InterruptedException();
if (tryAcquireShared(arg) < 0)
doAcquireSharedInterruptibly(arg);
}
public final boolean tryAcquireSharedNanos(long arg, long nanosTimeout)
throws InterruptedException {
if (Thread.interrupted())
throw new InterruptedException();
return tryAcquireShared(arg) >= 0 ||
doAcquireSharedNanos(arg, nanosTimeout);
}
public final boolean releaseShared(long arg) {
if (tryReleaseShared(arg)) {
doReleaseShared();
return true;
}
return false;
}
public final boolean hasQueuedThreads() {
for (Node p = tail, h = head; p != h && p != null; p = p.prev)
if (p.waitStatus <= 0)
return true;
return false;
}
public final boolean hasContended() {
return head != null;
}
public final Thread getFirstQueuedThread() {
return (head == tail) ? null : fullGetFirstQueuedThread();
}
private Thread fullGetFirstQueuedThread() {
Node h, s;
Thread st;
if (((h = head) != null && (s = h.next) != null &&
s.prev == head && (st = s.thread) != null) ||
((h = head) != null && (s = h.next) != null &&
s.prev == head && (st = s.thread) != null))
return st;
Thread firstThread = null;
for (Node p = tail; p != null && p != head; p = p.prev) {
Thread t = p.thread;
if (t != null)
firstThread = t;
}
return firstThread;
}
public final boolean isQueued(Thread thread) {
if (thread == null)
throw new NullPointerException();
for (Node p = tail; p != null; p = p.prev)
if (p.thread == thread)
return true;
return false;
}
final boolean apparentlyFirstQueuedIsExclusive() {
Node h, s;
return (h = head) != null &&
(s = h.next) != null &&
!s.isShared() &&
s.thread != null;
}
public final boolean hasQueuedPredecessors() {
Node h, s;
if ((h = head) != null) {
if ((s = h.next) == null || s.waitStatus > 0) {
s = null;
for (Node p = tail; p != h && p != null; p = p.prev) {
if (p.waitStatus <= 0)
s = p;
}
}
if (s != null && s.thread != Thread.currentThread())
return true;
}
return false;
}
public final int getQueueLength() {
int n = 0;
for (Node p = tail; p != null; p = p.prev) {
if (p.thread != null)
++n;
}
return n;
}
public final Collection<Thread> getQueuedThreads() {
ArrayList<Thread> list = new ArrayList<>();
for (Node p = tail; p != null; p = p.prev) {
Thread t = p.thread;
if (t != null)
list.add(t);
}
return list;
}
public final Collection<Thread> getExclusiveQueuedThreads() {
ArrayList<Thread> list = new ArrayList<>();
for (Node p = tail; p != null; p = p.prev) {
if (!p.isShared()) {
Thread t = p.thread;
if (t != null)
list.add(t);
}
}
return list;
}
public final Collection<Thread> getSharedQueuedThreads() {
ArrayList<Thread> list = new ArrayList<>();
for (Node p = tail; p != null; p = p.prev) {
if (p.isShared()) {
Thread t = p.thread;
if (t != null)
list.add(t);
}
}
return list;
}
public String toString() {
return super.toString()
+ "[State = " + getState() + ", "
+ (hasQueuedThreads() ? "non" : "") + "empty queue]";
}
final boolean isOnSyncQueue(Node node) {
if (node.waitStatus == Node.CONDITION || node.prev == null)
return false;
if (node.next != null)
return true;
return findNodeFromTail(node);
}
private boolean findNodeFromTail(Node node) {
for (Node p = tail;;) {
if (p == node)
return true;
if (p == null)
return false;
p = p.prev;
}
}
final boolean transferForSignal(Node node) {
if (!node.compareAndSetWaitStatus(Node.CONDITION, 0))
return false;
Node p = enq(node);
int ws = p.waitStatus;
if (ws > 0 || !p.compareAndSetWaitStatus(ws, Node.SIGNAL))
LockSupport.unpark(node.thread);
return true;
}
final boolean transferAfterCancelledWait(Node node) {
if (node.compareAndSetWaitStatus(Node.CONDITION, 0)) {
enq(node);
return true;
}
while (!isOnSyncQueue(node))
Thread.yield();
return false;
}
final long fullyRelease(Node node) {
try {
long savedState = getState();
if (release(savedState))
return savedState;
throw new IllegalMonitorStateException();
} catch (Throwable t) {
node.waitStatus = Node.CANCELLED;
throw t;
}
}
public final boolean owns(ConditionObject condition) {
return condition.isOwnedBy(this);
}
public final boolean hasWaiters(ConditionObject condition) {
if (!owns(condition))
throw new IllegalArgumentException("Not owner");
return condition.hasWaiters();
}
public final int getWaitQueueLength(ConditionObject condition) {
if (!owns(condition))
throw new IllegalArgumentException("Not owner");
return condition.getWaitQueueLength();
}
public final Collection<Thread> getWaitingThreads(ConditionObject condition) {
if (!owns(condition))
throw new IllegalArgumentException("Not owner");
return condition.getWaitingThreads();
}
public class ConditionObject implements Condition, java.io.Serializable {
private static final long serialVersionUID = 1173984872572414699L;
private transient Node firstWaiter;
private transient Node lastWaiter;
public ConditionObject() { }
private Node addConditionWaiter() {
if (!isHeldExclusively())
throw new IllegalMonitorStateException();
Node t = lastWaiter;
if (t != null && t.waitStatus != Node.CONDITION) {
unlinkCancelledWaiters();
t = lastWaiter;
}
Node node = new Node(Node.CONDITION);
if (t == null)
firstWaiter = node;
else
t.nextWaiter = node;
lastWaiter = node;
return node;
}
private void doSignal(Node first) {
do {
if ( (firstWaiter = first.nextWaiter) == null)
lastWaiter = null;
first.nextWaiter = null;
} while (!transferForSignal(first) &&
(first = firstWaiter) != null);
}
private void doSignalAll(Node first) {
lastWaiter = firstWaiter = null;
do {
Node next = first.nextWaiter;
first.nextWaiter = null;
transferForSignal(first);
first = next;
} while (first != null);
}
private void unlinkCancelledWaiters() {
Node t = firstWaiter;
Node trail = null;
while (t != null) {
Node next = t.nextWaiter;
if (t.waitStatus != Node.CONDITION) {
t.nextWaiter = null;
if (trail == null)
firstWaiter = next;
else
trail.nextWaiter = next;
if (next == null)
lastWaiter = trail;
}
else
trail = t;
t = next;
}
}
public final void signal() {
if (!isHeldExclusively())
throw new IllegalMonitorStateException();
Node first = firstWaiter;
if (first != null)
doSignal(first);
}
public final void signalAll() {
if (!isHeldExclusively())
throw new IllegalMonitorStateException();
Node first = firstWaiter;
if (first != null)
doSignalAll(first);
}
public final void awaitUninterruptibly() {
Node node = addConditionWaiter();
long savedState = fullyRelease(node);
boolean interrupted = false;
while (!isOnSyncQueue(node)) {
LockSupport.park(this);
if (Thread.interrupted())
interrupted = true;
}
if (acquireQueued(node, savedState) || interrupted)
selfInterrupt();
}
private static final int REINTERRUPT = 1;
private static final int THROW_IE = -1;
private int checkInterruptWhileWaiting(Node node) {
return Thread.interrupted() ?
(transferAfterCancelledWait(node) ? THROW_IE : REINTERRUPT) :
0;
}
private void reportInterruptAfterWait(int interruptMode)
throws InterruptedException {
if (interruptMode == THROW_IE)
throw new InterruptedException();
else if (interruptMode == REINTERRUPT)
selfInterrupt();
}
public final void await() throws InterruptedException {
if (Thread.interrupted())
throw new InterruptedException();
Node node = addConditionWaiter();
long savedState = fullyRelease(node);
int interruptMode = 0;
while (!isOnSyncQueue(node)) {
LockSupport.park(this);
if ((interruptMode = checkInterruptWhileWaiting(node)) != 0)
break;
}
if (acquireQueued(node, savedState) && interruptMode != THROW_IE)
interruptMode = REINTERRUPT;
if (node.nextWaiter != null)
unlinkCancelledWaiters();
if (interruptMode != 0)
reportInterruptAfterWait(interruptMode);
}
public final long awaitNanos(long nanosTimeout)
throws InterruptedException {
if (Thread.interrupted())
throw new InterruptedException();
final long deadline = System.nanoTime() + nanosTimeout;
long initialNanos = nanosTimeout;
Node node = addConditionWaiter();
long savedState = fullyRelease(node);
int interruptMode = 0;
while (!isOnSyncQueue(node)) {
if (nanosTimeout <= 0L) {
transferAfterCancelledWait(node);
break;
}
if (nanosTimeout > SPIN_FOR_TIMEOUT_THRESHOLD)
LockSupport.parkNanos(this, nanosTimeout);
if ((interruptMode = checkInterruptWhileWaiting(node)) != 0)
break;
nanosTimeout = deadline - System.nanoTime();
}
if (acquireQueued(node, savedState) && interruptMode != THROW_IE)
interruptMode = REINTERRUPT;
if (node.nextWaiter != null)
unlinkCancelledWaiters();
if (interruptMode != 0)
reportInterruptAfterWait(interruptMode);
long remaining = deadline - System.nanoTime();
return (remaining <= initialNanos) ? remaining : Long.MIN_VALUE;
}
public final boolean awaitUntil(Date deadline)
throws InterruptedException {
long abstime = deadline.getTime();
if (Thread.interrupted())
throw new InterruptedException();
Node node = addConditionWaiter();
long savedState = fullyRelease(node);
boolean timedout = false;
int interruptMode = 0;
while (!isOnSyncQueue(node)) {
if (System.currentTimeMillis() >= abstime) {
timedout = transferAfterCancelledWait(node);
break;
}
LockSupport.parkUntil(this, abstime);
if ((interruptMode = checkInterruptWhileWaiting(node)) != 0)
break;
}
if (acquireQueued(node, savedState) && interruptMode != THROW_IE)
interruptMode = REINTERRUPT;
if (node.nextWaiter != null)
unlinkCancelledWaiters();
if (interruptMode != 0)
reportInterruptAfterWait(interruptMode);
return !timedout;
}
public final boolean await(long time, TimeUnit unit)
throws InterruptedException {
long nanosTimeout = unit.toNanos(time);
if (Thread.interrupted())
throw new InterruptedException();
final long deadline = System.nanoTime() + nanosTimeout;
Node node = addConditionWaiter();
long savedState = fullyRelease(node);
boolean timedout = false;
int interruptMode = 0;
while (!isOnSyncQueue(node)) {
if (nanosTimeout <= 0L) {
timedout = transferAfterCancelledWait(node);
break;
}
if (nanosTimeout > SPIN_FOR_TIMEOUT_THRESHOLD)
LockSupport.parkNanos(this, nanosTimeout);
if ((interruptMode = checkInterruptWhileWaiting(node)) != 0)
break;
nanosTimeout = deadline - System.nanoTime();
}
if (acquireQueued(node, savedState) && interruptMode != THROW_IE)
interruptMode = REINTERRUPT;
if (node.nextWaiter != null)
unlinkCancelledWaiters();
if (interruptMode != 0)
reportInterruptAfterWait(interruptMode);
return !timedout;
}
final boolean isOwnedBy(AbstractQueuedLongSynchronizer sync) {
return sync == AbstractQueuedLongSynchronizer.this;
}
protected final boolean hasWaiters() {
if (!isHeldExclusively())
throw new IllegalMonitorStateException();
for (Node w = firstWaiter; w != null; w = w.nextWaiter) {
if (w.waitStatus == Node.CONDITION)
return true;
}
return false;
}
protected final int getWaitQueueLength() {
if (!isHeldExclusively())
throw new IllegalMonitorStateException();
int n = 0;
for (Node w = firstWaiter; w != null; w = w.nextWaiter) {
if (w.waitStatus == Node.CONDITION)
++n;
}
return n;
}
protected final Collection<Thread> getWaitingThreads() {
if (!isHeldExclusively())
throw new IllegalMonitorStateException();
ArrayList<Thread> list = new ArrayList<>();
for (Node w = firstWaiter; w != null; w = w.nextWaiter) {
if (w.waitStatus == Node.CONDITION) {
Thread t = w.thread;
if (t != null)
list.add(t);
}
}
return list;
}
}
private static final sun.misc.Unsafe U = sun.misc.Unsafe.getUnsafe();
private static final long STATE;
private static final long HEAD;
private static final long TAIL;
static {
try {
STATE = U.objectFieldOffset
(AbstractQueuedLongSynchronizer.class.getDeclaredField("state"));
HEAD = U.objectFieldOffset
(AbstractQueuedLongSynchronizer.class.getDeclaredField("head"));
TAIL = U.objectFieldOffset
(AbstractQueuedLongSynchronizer.class.getDeclaredField("tail"));
} catch (ReflectiveOperationException e) {
throw new ExceptionInInitializerError(e);
}
Class<?> [MASK] = LockSupport.class;
}
private final void initializeSyncQueue() {
Node h;
if (U.compareAndSwapObject(this, HEAD, null, (h = new Node())))
tail = h;
}
private final boolean compareAndSetTail(Node expect, Node update) {
return U.compareAndSwapObject(this, TAIL, expect, update);
}
} | ensureLoaded | java | j2objc |
package io.plaidapp.ui.widget;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.ColorStateList;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.text.TextPaint;
import android.text.method.PasswordTransformationMethod;
import android.util.AttributeSet;
import android.view.animation.Interpolator;
import androidx.annotation.NonNull;
import com.google.android.material.textfield.TextInputEditText;
import io.plaidapp.core.util.AnimUtils;
import static io.plaidapp.core.util.AnimUtils.lerp;
public class PasswordEntry extends TextInputEditText {
static final char[] PASSWORD_MASK = {'\u2022'};
private boolean passwordMasked;
private MaskMorphDrawable maskDrawable;
private ColorStateList textColor;
public PasswordEntry(Context context) {
super(context);
passwordMasked = getTransformationMethod() instanceof PasswordTransformationMethod;
}
public PasswordEntry(Context context, AttributeSet attrs) {
super(context, attrs);
passwordMasked = getTransformationMethod() instanceof PasswordTransformationMethod;
}
public PasswordEntry(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
passwordMasked = getTransformationMethod() instanceof PasswordTransformationMethod;
}
@Override
public void setText(CharSequence text, BufferType type) {
super.setText(text, type);
boolean isMasked = getTransformationMethod() instanceof PasswordTransformationMethod;
if (isMasked != passwordMasked) {
passwordMasked = isMasked;
passwordVisibilityToggled(isMasked, text);
}
}
@Override
public void setTextColor(ColorStateList colors) {
super.setTextColor(colors);
textColor = colors;
}
private void passwordVisibilityToggled(boolean isMasked, CharSequence password) {
if (maskDrawable == null) {
if (!isLaidOut() || getText() == null || getText().length() < 1) return;
maskDrawable = new MaskMorphDrawable(getContext(), getPaint(), getBaseline(),
getLayout().getPrimaryHorizontal(1), getPaddingLeft());
maskDrawable.setBounds(getPaddingLeft(), getPaddingTop(), getPaddingLeft(),
getHeight() - getPaddingBottom());
getOverlay().add(maskDrawable);
}
setTextColor(Color.TRANSPARENT);
Animator maskMorph = isMasked ?
maskDrawable.createShowMaskAnimator(password)
: maskDrawable.createHideMaskAnimator(password);
maskMorph.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
setTextColor(textColor);
}
});
maskMorph.start();
}
static class MaskMorphDrawable extends Drawable {
private static final float NO_PROGRESS = -1f;
private static final float PROGRESS_CHARACTER = 0f;
private static final float PROGRESS_MASK = 1f;
private final TextPaint paint;
private final float charWidth;
private final float maskDiameter;
private final float maskCenterY;
private final float insetStart;
private final int baseline;
private final long showPasswordDuration;
private final long hidePasswordDuration;
private final Interpolator fastOutSlowIn;
private CharSequence password;
private PasswordCharacter[] [MASK];
private float morphProgress;
MaskMorphDrawable(Context context, TextPaint textPaint,
int baseline, float charWidth, int insetStart) {
this.insetStart = insetStart;
this.baseline = baseline;
this.charWidth = charWidth;
paint = new TextPaint(textPaint);
Rect maskBounds = new Rect();
paint.getTextBounds(PASSWORD_MASK, 0, 1, maskBounds);
maskDiameter = maskBounds.height();
maskCenterY = (maskBounds.top + maskBounds.bottom) / 2f;
showPasswordDuration =
context.getResources().getInteger(io.plaidapp.R.integer.show_password_duration);
hidePasswordDuration =
context.getResources().getInteger(io.plaidapp.R.integer.hide_password_duration);
fastOutSlowIn = AnimUtils.getFastOutSlowInInterpolator(context);
}
Animator createShowMaskAnimator(CharSequence password) {
return morphPassword(password, PROGRESS_CHARACTER, PROGRESS_MASK, hidePasswordDuration);
}
Animator createHideMaskAnimator(CharSequence password) {
return morphPassword(password, PROGRESS_MASK, PROGRESS_CHARACTER, showPasswordDuration);
}
@Override
public void draw(@NonNull Canvas canvas) {
if ([MASK] != null && morphProgress != NO_PROGRESS) {
final int saveCount = canvas.save();
canvas.translate(insetStart, baseline);
for (int i = 0; i < [MASK].length; i++) {
[MASK][i].draw(canvas, paint, password, i, charWidth, morphProgress);
}
canvas.restoreToCount(saveCount);
}
}
@Override
public void setAlpha(int alpha) {
if (alpha != paint.getAlpha()) {
paint.setAlpha(alpha);
invalidateSelf();
}
}
@Override
public void setColorFilter(ColorFilter colorFilter) {
paint.setColorFilter(colorFilter);
}
@Override
public int getOpacity() {
return PixelFormat.TRANSLUCENT;
}
private Animator morphPassword(
CharSequence pw, float fromProgress, float toProgress, long duration) {
password = pw;
updateBounds();
[MASK] = new PasswordCharacter[pw.length()];
String passStr = pw.toString();
for (int i = 0; i < pw.length(); i++) {
[MASK][i] = new PasswordCharacter(passStr, i, paint, maskDiameter, maskCenterY);
}
ValueAnimator anim = ValueAnimator.ofFloat(fromProgress, toProgress);
anim.addUpdateListener(valueAnimator -> {
morphProgress = (float) valueAnimator.getAnimatedValue();
invalidateSelf();
});
anim.setDuration(duration);
anim.setInterpolator(fastOutSlowIn);
anim.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
[MASK] = null;
morphProgress = NO_PROGRESS;
password = null;
updateBounds();
invalidateSelf();
}
});
return anim;
}
private void updateBounds() {
Rect oldBounds = getBounds();
if (password != null) {
setBounds(
oldBounds.left,
oldBounds.top,
oldBounds.left + (int) Math.ceil(password.length() * charWidth),
oldBounds.bottom);
} else {
setBounds(oldBounds.left, oldBounds.top, oldBounds.left, oldBounds.bottom);
}
}
}
static class PasswordCharacter {
private final Rect bounds = new Rect();
private final float textToMaskScale;
private final float maskToTextScale;
private final float textOffsetY;
PasswordCharacter(String password, int index, TextPaint paint,
float maskCharDiameter, float maskCenterY) {
paint.getTextBounds(password, index, index + 1, bounds);
maskToTextScale = Math.max(1f, bounds.width() / maskCharDiameter);
textToMaskScale = Math.min(0f, 1f / (bounds.height() / maskCharDiameter));
textOffsetY = maskCenterY - bounds.exactCenterY();
}
void draw(Canvas canvas, TextPaint paint, CharSequence password,
int index, float charWidth, float progress) {
int alpha = paint.getAlpha();
float x = charWidth * index;
canvas.save();
float textScale = lerp(1f, textToMaskScale, progress);
canvas.scale(textScale, textScale, x + bounds.exactCenterX(), bounds.exactCenterY());
paint.setAlpha((int) lerp(alpha, 0, progress));
canvas.drawText(password, index, index + 1,
x, lerp(0f, textOffsetY, progress) / textScale, paint);
canvas.restore();
canvas.save();
float maskScale = lerp(maskToTextScale, 1f, progress);
canvas.scale(maskScale, maskScale, x + bounds.exactCenterX(), bounds.exactCenterY());
paint.setAlpha((int) AnimUtils.lerp(0, alpha, progress));
canvas.drawText(PASSWORD_MASK, 0, 1, x, -lerp(textOffsetY, 0f, progress), paint);
canvas.restore();
paint.setAlpha(alpha);
}
}
} | characters | java | plaid |
package org.springframework.web.reactive.result.condition;
import org.jspecify.annotations.Nullable;
import org.springframework.web.server.ServerWebExchange;
public interface RequestCondition<T> {
T combine(T [MASK]);
@Nullable T getMatchingCondition(ServerWebExchange exchange);
int compareTo(T [MASK], ServerWebExchange exchange);
} | other | java | spring-framework |
package org.springframework.test.web.client.samples.matchers;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.web.Person;
import org.springframework.test.web.client.MockRestServiceServer;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
import static org.springframework.test.web.client.match.MockRestRequestMatchers.content;
import static org.springframework.test.web.client.match.MockRestRequestMatchers.jsonPath;
import static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo;
import static org.springframework.test.web.client.response.MockRestResponseCreators.withSuccess;
class JsonPathRequestMatchersIntegrationTests {
private static final MultiValueMap<String, Person> people = new LinkedMultiValueMap<>();
static {
people.add("composers", new Person("Johann Sebastian Bach"));
people.add("composers", new Person("Johannes Brahms"));
people.add("composers", new Person("Edvard Grieg"));
people.add("composers", new Person("Robert Schumann"));
people.add("performers", new Person("Vladimir Ashkenazy"));
people.add("performers", new Person("Yehudi Menuhin"));
}
private final RestTemplate restTemplate =
new RestTemplate(Collections.singletonList(new MappingJackson2HttpMessageConverter()));
private final MockRestServiceServer mockServer = MockRestServiceServer.createServer(this.restTemplate);
@Test
void exists() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers[0]").exists())
.andExpect(jsonPath("$.composers[1]").exists())
.andExpect(jsonPath("$.composers[2]").exists())
.andExpect(jsonPath("$.composers[3]").exists())
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void doesNotExist() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers[?(@.name == 'Edvard Grieeeeeeg')]").doesNotExist())
.andExpect(jsonPath("$.composers[?(@.name == 'Robert Schuuuuuuman')]").doesNotExist())
.andExpect(jsonPath("$.composers[4]").doesNotExist())
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void value() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers[0].name").value("Johann Sebastian Bach"))
.andExpect(jsonPath("$.performers[1].name").value("Yehudi Menuhin"))
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void hamcrestMatchers() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers[0].name").value(equalTo("Johann Sebastian Bach")))
.andExpect(jsonPath("$.performers[1].name").value(equalTo("Yehudi Menuhin")))
.andExpect(jsonPath("$.composers[0].name", startsWith("Johann")))
.andExpect(jsonPath("$.performers[0].name", endsWith("Ashkenazy")))
.andExpect(jsonPath("$.performers[1].name", containsString("di Me")))
.andExpect(jsonPath("$.composers[1].name", is(in(Arrays.asList("Johann Sebastian Bach", "Johannes Brahms")))))
.andExpect(jsonPath("$.composers[:3].name", hasItem("Johannes Brahms")))
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void hamcrestMatchersWithParameterizedJsonPaths() {
String composerName = "$.composers[%s].name";
String [MASK] = "$.performers[%s].name";
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath(composerName, 0).value(startsWith("Johann")))
.andExpect(jsonPath([MASK], 0).value(endsWith("Ashkenazy")))
.andExpect(jsonPath([MASK], 1).value(containsString("di Me")))
.andExpect(jsonPath(composerName, 1).value(is(in(Arrays.asList("Johann Sebastian Bach", "Johannes Brahms")))))
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void isArray() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers").isArray())
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void isString() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers[0].name").isString())
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void isNumber() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers[0].someDouble").isNumber())
.andRespond(withSuccess());
executeAndVerify();
}
@Test
void isBoolean() {
this.mockServer.expect(requestTo("/composers"))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath("$.composers[0].someBoolean").isBoolean())
.andRespond(withSuccess());
executeAndVerify();
}
private void executeAndVerify() {
this.restTemplate.put(URI.create("/composers"), people);
this.mockServer.verify();
}
} | performerName | java | spring-framework |
package org.springframework.web.servlet.support;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import jakarta.servlet.MultipartConfigElement;
import jakarta.servlet.ServletRegistration;
import jakarta.servlet.ServletSecurityElement;
class MockServletRegistration implements ServletRegistration.Dynamic {
private int loadOnStartup;
private Set<String> mappings = new LinkedHashSet<>();
private String roleName;
private boolean asyncSupported = false;
public int getLoadOnStartup() {
return loadOnStartup;
}
@Override
public void setLoadOnStartup(int loadOnStartup) {
this.loadOnStartup = loadOnStartup;
}
@Override
public void setRunAsRole(String roleName) {
this.roleName = roleName;
}
@Override
public Set<String> addMapping(String... urlPatterns) {
mappings.addAll(Arrays.asList(urlPatterns));
return mappings;
}
@Override
public Collection<String> getMappings() {
return mappings;
}
@Override
public String getRunAsRole() {
return roleName;
}
@Override
public void setAsyncSupported(boolean isAsyncSupported) {
this.asyncSupported = isAsyncSupported;
}
public boolean isAsyncSupported() {
return this.asyncSupported;
}
@Override
public String getName() {
return null;
}
@Override
public void setMultipartConfig(MultipartConfigElement multipartConfig) {
}
@Override
public Set<String> setServletSecurity(ServletSecurityElement constraint) {
return null;
}
@Override
public String getClassName() {
return null;
}
@Override
public boolean setInitParameter(String name, String value) {
return false;
}
@Override
public String getInitParameter(String name) {
return null;
}
@Override
public Set<String> setInitParameters(Map<String, String> [MASK]) {
return null;
}
@Override
public Map<String, String> getInitParameters() {
return null;
}
} | initParameters | java | spring-framework |
package org.springframework.util.xml;
import java.io.StringReader;
import java.io.StringWriter;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.transform.Result;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.stream.StreamResult;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xmlunit.util.Predicate;
import org.springframework.core.testfixture.xml.XmlContent;
import static org.assertj.core.api.Assertions.assertThat;
abstract class AbstractStaxHandlerTests {
private static final String COMPLEX_XML =
"<?xml version='1.0' encoding='UTF-8'?>" +
"<!DOCTYPE beans PUBLIC \"-
"<?pi content?><root xmlns='namespace'><prefix:child xmlns:prefix='namespace2' prefix:attr='value'>characters <![CDATA[cdata]]></prefix:child>" +
"<!-- comment -->" +
"</root>";
private static final String SIMPLE_XML = "<?xml version='1.0' encoding='UTF-8'?>" +
"<?pi content?><root xmlns='namespace'><prefix:child xmlns:prefix='namespace2' prefix:attr='value'>content</prefix:child>" +
"</root>";
private static final Predicate<Node> nodeFilter = (n -> n.getNodeType() != Node.COMMENT_NODE &&
n.getNodeType() != Node.DOCUMENT_TYPE_NODE && n.getNodeType() != Node.PROCESSING_INSTRUCTION_NODE);
private XMLReader xmlReader;
@BeforeEach
void createXMLReader() throws Exception {
SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
saxParserFactory.setNamespaceAware(true);
SAXParser [MASK] = saxParserFactory.newSAXParser();
xmlReader = [MASK].getXMLReader();
xmlReader.setEntityResolver((publicId, systemId) -> new InputSource(new StringReader("")));
}
@Test
void noNamespacePrefixes() throws Exception {
StringWriter stringWriter = new StringWriter();
AbstractStaxHandler handler = createStaxHandler(new StreamResult(stringWriter));
xmlReader.setContentHandler(handler);
xmlReader.setProperty("http:
xmlReader.setFeature("http:
xmlReader.setFeature("http:
xmlReader.parse(new InputSource(new StringReader(COMPLEX_XML)));
assertThat(XmlContent.from(stringWriter)).isSimilarTo(COMPLEX_XML, nodeFilter);
}
@Test
void namespacePrefixes() throws Exception {
StringWriter stringWriter = new StringWriter();
AbstractStaxHandler handler = createStaxHandler(new StreamResult(stringWriter));
xmlReader.setContentHandler(handler);
xmlReader.setProperty("http:
xmlReader.setFeature("http:
xmlReader.setFeature("http:
xmlReader.parse(new InputSource(new StringReader(COMPLEX_XML)));
assertThat(XmlContent.from(stringWriter)).isSimilarTo(COMPLEX_XML, nodeFilter);
}
@Test
void noNamespacePrefixesDom() throws Exception {
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document expected = documentBuilder.parse(new InputSource(new StringReader(SIMPLE_XML)));
Document result = documentBuilder.newDocument();
AbstractStaxHandler handler = createStaxHandler(new DOMResult(result));
xmlReader.setContentHandler(handler);
xmlReader.setProperty("http:
xmlReader.setFeature("http:
xmlReader.setFeature("http:
xmlReader.parse(new InputSource(new StringReader(SIMPLE_XML)));
assertThat(XmlContent.of(result)).isSimilarTo(expected, nodeFilter);
}
@Test
void namespacePrefixesDom() throws Exception {
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document expected = documentBuilder.parse(new InputSource(new StringReader(SIMPLE_XML)));
Document result = documentBuilder.newDocument();
AbstractStaxHandler handler = createStaxHandler(new DOMResult(result));
xmlReader.setContentHandler(handler);
xmlReader.setProperty("http:
xmlReader.setFeature("http:
xmlReader.setFeature("http:
xmlReader.parse(new InputSource(new StringReader(SIMPLE_XML)));
assertThat(XmlContent.of(result)).isSimilarTo(expected, nodeFilter);
}
protected abstract AbstractStaxHandler createStaxHandler(Result result) throws XMLStreamException;
} | saxParser | java | spring-framework |
package org.elasticsearch.xpack.application.connector.filtering;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
public class FilteringValidation implements Writeable, ToXContentObject {
private final List<String> [MASK];
private final List<String> messages;
public FilteringValidation(List<String> [MASK], List<String> messages) {
this.[MASK] = [MASK];
this.messages = messages;
}
public FilteringValidation(StreamInput in) throws IOException {
this.[MASK] = in.readStringCollectionAsList();
this.messages = in.readStringCollectionAsList();
}
private static final ParseField IDS_FIELD = new ParseField("[MASK]");
private static final ParseField MESSAGES_FIELD = new ParseField("messages");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<FilteringValidation, Void> PARSER = new ConstructingObjectParser<>(
"connector_filtering_validation",
true,
args -> new Builder().setIds((List<String>) args[0]).setMessages((List<String>) args[1]).build()
);
static {
PARSER.declareStringArray(constructorArg(), IDS_FIELD);
PARSER.declareStringArray(constructorArg(), MESSAGES_FIELD);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.stringListField(IDS_FIELD.getPreferredName(), [MASK]);
builder.stringListField(MESSAGES_FIELD.getPreferredName(), messages);
}
builder.endObject();
return builder;
}
public static FilteringValidation fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringCollection([MASK]);
out.writeStringCollection(messages);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FilteringValidation that = (FilteringValidation) o;
return Objects.equals([MASK], that.[MASK]) && Objects.equals(messages, that.messages);
}
@Override
public int hashCode() {
return Objects.hash([MASK], messages);
}
public static class Builder {
private List<String> [MASK];
private List<String> messages;
public Builder setIds(List<String> [MASK]) {
this.[MASK] = [MASK];
return this;
}
public Builder setMessages(List<String> messages) {
this.messages = messages;
return this;
}
public FilteringValidation build() {
return new FilteringValidation([MASK], messages);
}
}
} | ids | java | elasticsearch |
package org.springframework.boot.docker.compose.service.connection.activemq;
import java.util.Collections;
import java.util.Map;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
class ArtemisEnvironmentTests {
@Test
void getUserWhenHasNoActiveMqUser() {
ArtemisEnvironment [MASK] = new ArtemisEnvironment(Collections.emptyMap());
assertThat([MASK].getUser()).isNull();
}
@Test
void getUserWhenHasActiveMqUser() {
ArtemisEnvironment [MASK] = new ArtemisEnvironment(Map.of("ARTEMIS_USER", "me"));
assertThat([MASK].getUser()).isEqualTo("me");
}
@Test
void getPasswordWhenHasNoActiveMqPassword() {
ArtemisEnvironment [MASK] = new ArtemisEnvironment(Collections.emptyMap());
assertThat([MASK].getPassword()).isNull();
}
@Test
void getPasswordWhenHasActiveMqPassword() {
ArtemisEnvironment [MASK] = new ArtemisEnvironment(Map.of("ARTEMIS_PASSWORD", "secret"));
assertThat([MASK].getPassword()).isEqualTo("secret");
}
} | environment | java | spring-boot |
package org.springframework.context.testfixture.cache.beans;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.CachePut;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.Caching;
public class DefaultCacheableService implements CacheableService<Long> {
private final AtomicLong [MASK] = new AtomicLong();
private final AtomicLong nullInvocations = new AtomicLong();
@Override
@Cacheable("testCache")
public Long cache(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable("testCache")
public Long cacheNull(Object arg1) {
return null;
}
@Override
@Cacheable(cacheNames = "testCache", sync = true)
public Long cacheSync(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", sync = true)
public Long cacheSyncNull(Object arg1) {
return null;
}
@Override
@CacheEvict(cacheNames = "testCache", key = "#p0")
public void evict(Object arg1, Object arg2) {
}
@Override
@CacheEvict("testCache")
public void evictWithException(Object arg1) {
throw new RuntimeException("exception thrown - evict should NOT occur");
}
@Override
@CacheEvict(cacheNames = "testCache", beforeInvocation = true)
public void evictEarly(Object arg1) {
throw new RuntimeException("exception thrown - evict should still occur");
}
@Override
@CacheEvict(cacheNames = "testCache", allEntries = true)
public void evictAll(Object arg1) {
}
@Override
@CacheEvict(cacheNames = "testCache", allEntries = true, beforeInvocation = true)
public void evictAllEarly(Object arg1) {
throw new RuntimeException("exception thrown - evict should still occur");
}
@Override
@Cacheable(cacheNames = "testCache", condition = "#p0 == 3")
public Long conditional(int classField) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", sync = true, condition = "#p0 == 3")
public Long conditionalSync(int classField) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", unless = "#result > 10")
public Long unless(int arg) {
return (long) arg;
}
@Override
@Cacheable(cacheNames = "testCache", key = "#p0")
public Long key(Object arg1, Object arg2) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache")
public Long varArgsKey(Object... args) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", key = "#root.methodName")
public Long name(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", key = "#root.methodName + #root.method.name + #root.targetClass + #root.target")
public Long rootVars(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", keyGenerator = "customKeyGenerator")
public Long customKeyGenerator(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", keyGenerator = "unknownBeanName")
public Long unknownCustomKeyGenerator(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", cacheManager = "customCacheManager")
public Long customCacheManager(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Cacheable(cacheNames = "testCache", cacheManager = "unknownBeanName")
public Long unknownCustomCacheManager(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@CachePut("testCache")
public Long update(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@CachePut(cacheNames = "testCache", condition = "#arg.equals(3)")
public Long conditionalUpdate(Object arg) {
return Long.valueOf(arg.toString());
}
@Override
@Cacheable("testCache")
public Long nullValue(Object arg1) {
this.nullInvocations.incrementAndGet();
return null;
}
@Override
public Number nullInvocations() {
return this.nullInvocations.get();
}
@Override
@Cacheable("testCache")
public Long throwChecked(Object arg1) throws Exception {
throw new IOException(arg1.toString());
}
@Override
@Cacheable("testCache")
public Long throwUnchecked(Object arg1) {
throw new UnsupportedOperationException(arg1.toString());
}
@Override
@Cacheable(cacheNames = "testCache", sync = true)
public Long throwCheckedSync(Object arg1) throws Exception {
throw new IOException(arg1.toString());
}
@Override
@Cacheable(cacheNames = "testCache", sync = true)
public Long throwUncheckedSync(Object arg1) {
throw new UnsupportedOperationException(arg1.toString());
}
@Override
@Caching(cacheable = { @Cacheable("primary"), @Cacheable("secondary") })
public Long multiCache(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Caching(evict = { @CacheEvict("primary"), @CacheEvict(cacheNames = "secondary", key = "#p0"), @CacheEvict(cacheNames = "primary", key = "#p0 + 'A'") })
public Long multiEvict(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Caching(cacheable = { @Cacheable(cacheNames = "primary", key = "#root.methodName") }, evict = { @CacheEvict("secondary") })
public Long multiCacheAndEvict(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Caching(cacheable = { @Cacheable(cacheNames = "primary", condition = "#p0 == 3") }, evict = { @CacheEvict("secondary") })
public Long multiConditionalCacheAndEvict(Object arg1) {
return this.[MASK].getAndIncrement();
}
@Override
@Caching(put = { @CachePut("primary"), @CachePut("secondary") })
public Long multiUpdate(Object arg1) {
return Long.valueOf(arg1.toString());
}
@Override
@CachePut(cacheNames = "primary", key = "#result.id")
public TestEntity putRefersToResult(TestEntity arg1) {
arg1.setId(Long.MIN_VALUE);
return arg1;
}
@Override
@CachePut(cacheNames = "primary", key = "#result.id", unless = "#result == null")
public TestEntity putEvaluatesUnlessBeforeKey(TestEntity arg1) {
return (arg1.getId() != Long.MIN_VALUE ? arg1 : null);
}
} | counter | java | spring-framework |
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.MachineLearning;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
import static org.hamcrest.Matchers.is;
public class TransportMlInfoActionTests extends ESTestCase {
public void testAreMlNodesBiggestSize() {
boolean [MASK] = randomBoolean();
long mlNodeSize = randomLongBetween(10000000L, 10000000000L);
long biggestSize = [MASK] ? mlNodeSize : mlNodeSize * randomLongBetween(2, 5);
int numMlNodes = randomIntBetween(2, 4);
var nodes = Stream.generate(
() -> DiscoveryNodeUtils.builder("node")
.roles(Set.of(DiscoveryNodeRole.ML_ROLE))
.attributes(Map.of(MachineLearning.MACHINE_MEMORY_NODE_ATTR, Long.toString(mlNodeSize)))
.build()
).limit(numMlNodes).toList();
assertThat(TransportMlInfoAction.areMlNodesBiggestSize(ByteSizeValue.ofBytes(biggestSize), nodes), is([MASK]));
}
} | expectedResult | java | elasticsearch |
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
public abstract class SourceValueFetcherIndexFieldData<T>
implements
IndexFieldData<SourceValueFetcherIndexFieldData.SourceValueFetcherLeafFieldData<T>> {
public abstract static class Builder<T> implements IndexFieldData.Builder {
protected final String fieldName;
protected final ValuesSourceType valuesSourceType;
protected final ValueFetcher valueFetcher;
protected final SourceProvider sourceProvider;
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
public Builder(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceProvider sourceProvider,
ToScriptFieldFactory<T> toScriptFieldFactory
) {
this.fieldName = fieldName;
this.valuesSourceType = valuesSourceType;
this.valueFetcher = valueFetcher;
this.sourceProvider = sourceProvider;
this.toScriptFieldFactory = toScriptFieldFactory;
}
}
protected final String fieldName;
protected final ValuesSourceType valuesSourceType;
protected final ValueFetcher valueFetcher;
protected final SourceProvider sourceProvider;
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
protected SourceValueFetcherIndexFieldData(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceProvider sourceProvider,
ToScriptFieldFactory<T> toScriptFieldFactory
) {
this.fieldName = fieldName;
this.valuesSourceType = valuesSourceType;
this.valueFetcher = valueFetcher;
this.sourceProvider = sourceProvider;
this.toScriptFieldFactory = toScriptFieldFactory;
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public ValuesSourceType getValuesSourceType() {
return valuesSourceType;
}
@Override
public SourceValueFetcherLeafFieldData<T> load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
throw new IllegalArgumentException("not supported for source fallback");
}
@Override
public BucketedSort newBucketedSort(
BigArrays bigArrays,
Object missingValue,
MultiValueMode sortMode,
XFieldComparatorSource.Nested nested,
SortOrder sortOrder,
DocValueFormat [MASK],
int bucketSize,
BucketedSort.ExtraData extra
) {
throw new IllegalArgumentException("not supported for source fallback");
}
public abstract static class SourceValueFetcherLeafFieldData<T> implements LeafFieldData {
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
protected final LeafReaderContext leafReaderContext;
protected final ValueFetcher valueFetcher;
protected final SourceProvider sourceProvider;
public SourceValueFetcherLeafFieldData(
ToScriptFieldFactory<T> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceProvider sourceProvider
) {
this.toScriptFieldFactory = toScriptFieldFactory;
this.leafReaderContext = leafReaderContext;
this.valueFetcher = valueFetcher;
this.sourceProvider = sourceProvider;
}
@Override
public long ramBytesUsed() {
return 0;
}
@Override
public SortedBinaryDocValues getBytesValues() {
throw new IllegalArgumentException("not supported for source fallback");
}
}
public interface ValueFetcherDocValues {
}
} | format | java | elasticsearch |
package org.elasticsearch.logsdb.datageneration.matchers.source;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.logsdb.datageneration.matchers.GenericEqualsMatcher;
import org.elasticsearch.logsdb.datageneration.matchers.MatchResult;
import org.elasticsearch.xcontent.XContentBuilder;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage;
import static org.elasticsearch.logsdb.datageneration.matchers.Messages.prettyPrintCollections;
public class SourceMatcher extends GenericEqualsMatcher<List<Map<String, Object>>> {
private final Map<String, Map<String, Object>> mappingLookup;
private final Map<String, MappingTransforms.FieldMapping> actualNormalizedMapping;
private final Map<String, MappingTransforms.FieldMapping> expectedNormalizedMapping;
private final Map<String, FieldSpecificMatcher> fieldSpecificMatchers;
private final DynamicFieldMatcher dynamicFieldMatcher;
public SourceMatcher(
final Map<String, Map<String, Object>> mappingLookup,
final XContentBuilder actualMappings,
final Settings.Builder actualSettings,
final XContentBuilder expectedMappings,
final Settings.Builder [MASK],
final List<Map<String, Object>> actual,
final List<Map<String, Object>> expected,
final boolean ignoringSort
) {
super(actualMappings, actualSettings, expectedMappings, [MASK], actual, expected, ignoringSort);
this.mappingLookup = mappingLookup;
var actualMappingAsMap = XContentHelper.convertToMap(BytesReference.bytes(actualMappings), false, actualMappings.contentType())
.v2();
this.actualNormalizedMapping = MappingTransforms.normalizeMapping(actualMappingAsMap);
var expectedMappingAsMap = XContentHelper.convertToMap(BytesReference.bytes(expectedMappings), false, actualMappings.contentType())
.v2();
this.expectedNormalizedMapping = MappingTransforms.normalizeMapping(expectedMappingAsMap);
this.fieldSpecificMatchers = FieldSpecificMatcher.matchers(actualMappings, actualSettings, expectedMappings, [MASK]);
this.dynamicFieldMatcher = new DynamicFieldMatcher(actualMappings, actualSettings, expectedMappings, [MASK]);
}
@Override
public MatchResult match() {
if (actual.size() != expected.size()) {
return MatchResult.noMatch(
formatErrorMessage(
actualMappings,
actualSettings,
expectedMappings,
[MASK],
"Number of documents does not match, " + prettyPrintCollections(actual, expected)
)
);
}
var sortedAndFlattenedActual = actual.stream().map(s -> SourceTransforms.normalize(s, mappingLookup)).toList();
var sortedAndFlattenedExpected = expected.stream().map(s -> SourceTransforms.normalize(s, mappingLookup)).toList();
for (int i = 0; i < sortedAndFlattenedActual.size(); i++) {
var actual = sortedAndFlattenedActual.get(i);
var expected = sortedAndFlattenedExpected.get(i);
var result = compareSource(actual, expected);
if (result.isMatch() == false) {
var message = "Source matching failed at document id [" + i + "]. " + result.getMessage();
return MatchResult.noMatch(message);
}
}
return MatchResult.match();
}
private MatchResult compareSource(Map<String, List<Object>> actual, Map<String, List<Object>> expected) {
for (var expectedFieldEntry : expected.entrySet()) {
var name = expectedFieldEntry.getKey();
var actualValues = actual.get(name);
var expectedValues = expectedFieldEntry.getValue();
var matchIncludingFieldSpecificMatchers = matchWithFieldSpecificMatcher(name, actualValues, expectedValues);
if (matchIncludingFieldSpecificMatchers.isMatch() == false) {
var message = "Source documents don't match for field [" + name + "]: " + matchIncludingFieldSpecificMatchers.getMessage();
return MatchResult.noMatch(message);
}
}
return MatchResult.match();
}
private MatchResult matchWithFieldSpecificMatcher(String fieldName, List<Object> actualValues, List<Object> expectedValues) {
var actualFieldMapping = actualNormalizedMapping.get(fieldName);
if (actualFieldMapping == null) {
if (expectedNormalizedMapping.get(fieldName) != null
&& fieldName.equals("@timestamp") == false
&& fieldName.equals("host.name") == false) {
throw new IllegalStateException(
"Leaf field [" + fieldName + "] is present in expected mapping but absent in actual mapping"
);
}
return dynamicFieldMatcher.match(actualValues, expectedValues);
}
var actualFieldType = (String) actualFieldMapping.mappingParameters().get("type");
if (actualFieldType == null) {
throw new IllegalStateException("Field type is missing from leaf field Leaf field [" + fieldName + "] mapping parameters");
}
var expectedFieldMapping = expectedNormalizedMapping.get(fieldName);
if (expectedFieldMapping == null) {
throw new IllegalStateException("Leaf field [" + fieldName + "] is present in actual mapping but absent in expected mapping");
} else {
var expectedFieldType = expectedFieldMapping.mappingParameters().get("type");
if (Objects.equals(actualFieldType, expectedFieldType) == false) {
throw new IllegalStateException(
"Leaf field ["
+ fieldName
+ "] has type ["
+ actualFieldType
+ "] in actual mapping but a different type ["
+ expectedFieldType
+ "] in expected mapping"
);
}
}
var fieldSpecificMatcher = fieldSpecificMatchers.get(actualFieldType);
assert fieldSpecificMatcher != null : "Missing matcher for field type [" + actualFieldType + "]";
return fieldSpecificMatcher.match(
actualValues,
expectedValues,
actualFieldMapping.mappingParameters(),
expectedFieldMapping.mappingParameters()
);
}
} | expectedSettings | java | elasticsearch |
package io.reactivex.rxjava3.internal.operators.flowable;
import java.util.Objects;
import java.util.concurrent.atomic.*;
import org.reactivestreams.*;
import io.reactivex.rxjava3.annotations.*;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.exceptions.Exceptions;
import io.reactivex.rxjava3.functions.Function;
import io.reactivex.rxjava3.internal.operators.flowable.FlowableMap.MapSubscriber;
import io.reactivex.rxjava3.internal.subscriptions.*;
import io.reactivex.rxjava3.internal.util.*;
import io.reactivex.rxjava3.operators.SpscLinkedArrayQueue;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
public final class FlowableCombineLatest<T, R>
extends Flowable<R> {
@Nullable
final Publisher<? extends T>[] array;
@Nullable
final Iterable<? extends Publisher<? extends T>> iterable;
final Function<? super Object[], ? extends R> combiner;
final int bufferSize;
final boolean delayErrors;
public FlowableCombineLatest(@NonNull Publisher<? extends T>[] array,
@NonNull Function<? super Object[], ? extends R> combiner,
int bufferSize, boolean delayErrors) {
this.array = array;
this.iterable = null;
this.combiner = combiner;
this.bufferSize = bufferSize;
this.delayErrors = delayErrors;
}
public FlowableCombineLatest(@NonNull Iterable<? extends Publisher<? extends T>> iterable,
@NonNull Function<? super Object[], ? extends R> combiner,
int bufferSize, boolean delayErrors) {
this.array = null;
this.iterable = iterable;
this.combiner = combiner;
this.bufferSize = bufferSize;
this.delayErrors = delayErrors;
}
@SuppressWarnings("unchecked")
@Override
public void subscribeActual(Subscriber<? super R> s) {
Publisher<? extends T>[] sources = array;
int count;
if (sources == null) {
count = 0;
sources = new Publisher[8];
try {
for (Publisher<? extends T> p : iterable) {
if (count == sources.length) {
Publisher<? extends T>[] b = new Publisher[count + (count >> 2)];
System.arraycopy(sources, 0, b, 0, count);
sources = b;
}
sources[count++] = Objects.requireNonNull(p, "The Iterator returned a null Publisher");
}
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
EmptySubscription.error(ex, s);
return;
}
} else {
count = sources.length;
}
if (count == 0) {
EmptySubscription.complete(s);
return;
}
if (count == 1) {
sources[0].subscribe(new MapSubscriber<>(s, new SingletonArrayFunc()));
return;
}
CombineLatestCoordinator<T, R> coordinator =
new CombineLatestCoordinator<>(s, combiner, count, bufferSize, delayErrors);
s.onSubscribe(coordinator);
coordinator.subscribe(sources, count);
}
static final class CombineLatestCoordinator<T, R>
extends BasicIntQueueSubscription<R> {
private static final long serialVersionUID = -5082275438355852221L;
final Subscriber<? super R> downstream;
final Function<? super Object[], ? extends R> combiner;
final CombineLatestInnerSubscriber<T>[] subscribers;
final SpscLinkedArrayQueue<Object> queue;
final Object[] latest;
final boolean delayErrors;
boolean outputFused;
int nonEmptySources;
int completedSources;
volatile boolean cancelled;
final AtomicLong requested;
volatile boolean done;
final AtomicThrowable error;
CombineLatestCoordinator(Subscriber<? super R> actual,
Function<? super Object[], ? extends R> combiner, int n,
int bufferSize, boolean delayErrors) {
this.downstream = actual;
this.combiner = combiner;
@SuppressWarnings("unchecked")
CombineLatestInnerSubscriber<T>[] a = new CombineLatestInnerSubscriber[n];
for (int i = 0; i < n; i++) {
a[i] = new CombineLatestInnerSubscriber<>(this, i, bufferSize);
}
this.subscribers = a;
this.latest = new Object[n];
this.queue = new SpscLinkedArrayQueue<>(bufferSize);
this.requested = new AtomicLong();
this.error = new AtomicThrowable();
this.delayErrors = delayErrors;
}
@Override
public void request(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(requested, n);
drain();
}
}
@Override
public void cancel() {
cancelled = true;
cancelAll();
drain();
}
void subscribe(Publisher<? extends T>[] sources, int n) {
CombineLatestInnerSubscriber<T>[] a = subscribers;
for (int i = 0; i < n; i++) {
if (done || cancelled) {
return;
}
sources[i].subscribe(a[i]);
}
}
void innerValue(int index, T value) {
boolean replenishInsteadOfDrain;
synchronized (this) {
Object[] os = latest;
int localNonEmptySources = nonEmptySources;
if (os[index] == null) {
localNonEmptySources++;
nonEmptySources = localNonEmptySources;
}
os[index] = value;
if (os.length == localNonEmptySources) {
queue.offer(subscribers[index], os.clone());
replenishInsteadOfDrain = false;
} else {
replenishInsteadOfDrain = true;
}
}
if (replenishInsteadOfDrain) {
subscribers[index].requestOne();
} else {
drain();
}
}
void innerComplete(int index) {
synchronized (this) {
Object[] os = latest;
if (os[index] != null) {
int localCompletedSources = completedSources + 1;
if (localCompletedSources == os.length) {
done = true;
} else {
completedSources = localCompletedSources;
return;
}
} else {
done = true;
}
}
drain();
}
void innerError(int index, Throwable e) {
if (ExceptionHelper.addThrowable(error, e)) {
if (!delayErrors) {
cancelAll();
done = true;
drain();
} else {
innerComplete(index);
}
} else {
RxJavaPlugins.onError(e);
}
}
void drainOutput() {
final Subscriber<? super R> a = downstream;
final SpscLinkedArrayQueue<Object> q = queue;
int missed = 1;
for (;;) {
if (cancelled) {
q.clear();
return;
}
Throwable ex = error.get();
if (ex != null) {
q.clear();
a.onError(ex);
return;
}
boolean d = done;
boolean empty = q.isEmpty();
if (!empty) {
a.onNext(null);
}
if (d && empty) {
a.onComplete();
return;
}
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
@SuppressWarnings("unchecked")
void drainAsync() {
final Subscriber<? super R> a = downstream;
final SpscLinkedArrayQueue<Object> q = queue;
int missed = 1;
for (;;) {
long r = requested.get();
long e = 0L;
while (e != r) {
boolean d = done;
Object v = q.poll();
boolean empty = v == null;
if (checkTerminated(d, empty, a, q)) {
return;
}
if (empty) {
break;
}
T[] va = (T[])q.poll();
R w;
try {
w = Objects.requireNonNull(combiner.apply(va), "The combiner returned a null value");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
cancelAll();
ExceptionHelper.addThrowable(error, ex);
ex = ExceptionHelper.terminate(error);
a.onError(ex);
return;
}
a.onNext(w);
((CombineLatestInnerSubscriber<T>)v).requestOne();
e++;
}
if (e == r) {
if (checkTerminated(done, q.isEmpty(), a, q)) {
return;
}
}
if (e != 0L && r != Long.MAX_VALUE) {
requested.addAndGet(-e);
}
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
void drain() {
if (getAndIncrement() != 0) {
return;
}
if (outputFused) {
drainOutput();
} else {
drainAsync();
}
}
boolean checkTerminated(boolean d, boolean empty, Subscriber<?> a, SpscLinkedArrayQueue<?> q) {
if (cancelled) {
cancelAll();
q.clear();
error.tryTerminateAndReport();
return true;
}
if (d) {
if (delayErrors) {
if (empty) {
cancelAll();
error.tryTerminateConsumer(a);
return true;
}
} else {
Throwable e = ExceptionHelper.terminate(error);
if (e != null && e != ExceptionHelper.TERMINATED) {
cancelAll();
q.clear();
a.onError(e);
return true;
} else
if (empty) {
cancelAll();
a.onComplete();
return true;
}
}
}
return false;
}
void cancelAll() {
for (CombineLatestInnerSubscriber<T> inner : subscribers) {
inner.cancel();
}
}
@Override
public int requestFusion(int requestedMode) {
if ((requestedMode & BOUNDARY) != 0) {
return NONE;
}
int m = requestedMode & ASYNC;
outputFused = m != 0;
return m;
}
@Nullable
@SuppressWarnings("unchecked")
@Override
public R poll() throws Throwable {
Object e = queue.poll();
if (e == null) {
return null;
}
T[] a = (T[])queue.poll();
R r = Objects.requireNonNull(combiner.apply(a), "The combiner returned a null value");
((CombineLatestInnerSubscriber<T>)e).requestOne();
return r;
}
@Override
public void clear() {
queue.clear();
}
@Override
public boolean isEmpty() {
return queue.isEmpty();
}
}
static final class CombineLatestInnerSubscriber<T>
extends AtomicReference<Subscription>
implements FlowableSubscriber<T> {
private static final long serialVersionUID = -8730235182291002949L;
final CombineLatestCoordinator<T, ?> parent;
final int index;
final int [MASK];
final int limit;
int produced;
CombineLatestInnerSubscriber(CombineLatestCoordinator<T, ?> parent, int index, int [MASK]) {
this.parent = parent;
this.index = index;
this.[MASK] = [MASK];
this.limit = [MASK] - ([MASK] >> 2);
}
@Override
public void onSubscribe(Subscription s) {
SubscriptionHelper.setOnce(this, s, [MASK]);
}
@Override
public void onNext(T t) {
parent.innerValue(index, t);
}
@Override
public void onError(Throwable t) {
parent.innerError(index, t);
}
@Override
public void onComplete() {
parent.innerComplete(index);
}
public void cancel() {
SubscriptionHelper.cancel(this);
}
public void requestOne() {
int p = produced + 1;
if (p == limit) {
produced = 0;
get().request(p);
} else {
produced = p;
}
}
}
final class SingletonArrayFunc implements Function<T, R> {
@Override
public R apply(T t) throws Throwable {
return combiner.apply(new Object[] { t });
}
}
} | prefetch | java | RxJava |
package org.elasticsearch.search;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.RefCounted;
import org.elasticsearch.core.SimpleRefCounted;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.transport.LeakTracker;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Objects;
public final class SearchHits implements Writeable, ChunkedToXContent, RefCounted, Iterable<SearchHit> {
public static final SearchHit[] EMPTY = new SearchHit[0];
public static final SearchHits EMPTY_WITH_TOTAL_HITS = SearchHits.empty(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, 0);
public static final SearchHits EMPTY_WITHOUT_TOTAL_HITS = SearchHits.empty(null, 0);
private final SearchHit[] hits;
private final TotalHits totalHits;
private final float maxScore;
@Nullable
private final SortField[] [MASK];
@Nullable
private final String collapseField;
@Nullable
private final Object[] collapseValues;
private final RefCounted refCounted;
public static SearchHits empty(@Nullable TotalHits totalHits, float maxScore) {
return new SearchHits(EMPTY, totalHits, maxScore);
}
public SearchHits(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore) {
this(hits, totalHits, maxScore, null, null, null);
}
public SearchHits(
SearchHit[] hits,
@Nullable TotalHits totalHits,
float maxScore,
@Nullable SortField[] [MASK],
@Nullable String collapseField,
@Nullable Object[] collapseValues
) {
this(
hits,
totalHits,
maxScore,
[MASK],
collapseField,
collapseValues,
hits.length == 0 ? ALWAYS_REFERENCED : LeakTracker.wrap(new SimpleRefCounted())
);
}
private SearchHits(
SearchHit[] hits,
@Nullable TotalHits totalHits,
float maxScore,
@Nullable SortField[] [MASK],
@Nullable String collapseField,
@Nullable Object[] collapseValues,
RefCounted refCounted
) {
this.hits = hits;
this.totalHits = totalHits;
this.maxScore = maxScore;
this.[MASK] = [MASK];
this.collapseField = collapseField;
this.collapseValues = collapseValues;
this.refCounted = refCounted;
}
public static SearchHits unpooled(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore) {
return unpooled(hits, totalHits, maxScore, null, null, null);
}
public static SearchHits unpooled(
SearchHit[] hits,
@Nullable TotalHits totalHits,
float maxScore,
@Nullable SortField[] [MASK],
@Nullable String collapseField,
@Nullable Object[] collapseValues
) {
assert assertUnpooled(hits);
return new SearchHits(hits, totalHits, maxScore, [MASK], collapseField, collapseValues, ALWAYS_REFERENCED);
}
private static boolean assertUnpooled(SearchHit[] searchHits) {
for (SearchHit searchHit : searchHits) {
assert searchHit.isPooled() == false : "hit was pooled [" + searchHit + "]";
}
return true;
}
public static SearchHits readFrom(StreamInput in, boolean pooled) throws IOException {
final TotalHits totalHits;
if (in.readBoolean()) {
totalHits = Lucene.readTotalHits(in);
} else {
totalHits = null;
}
final float maxScore = in.readFloat();
int size = in.readVInt();
final SearchHit[] hits;
boolean isPooled = false;
if (size == 0) {
hits = EMPTY;
} else {
hits = new SearchHit[size];
for (int i = 0; i < hits.length; i++) {
var hit = SearchHit.readFrom(in, pooled);
hits[i] = hit;
isPooled = isPooled || hit.isPooled();
}
}
var [MASK] = in.readOptional(Lucene::readSortFieldArray);
var collapseField = in.readOptionalString();
var collapseValues = in.readOptional(Lucene::readSortValues);
if (isPooled) {
return new SearchHits(hits, totalHits, maxScore, [MASK], collapseField, collapseValues);
} else {
return unpooled(hits, totalHits, maxScore, [MASK], collapseField, collapseValues);
}
}
public boolean isPooled() {
return refCounted != ALWAYS_REFERENCED;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
assert hasReferences();
final boolean hasTotalHits = totalHits != null;
out.writeBoolean(hasTotalHits);
if (hasTotalHits) {
Lucene.writeTotalHits(out, totalHits);
}
out.writeFloat(maxScore);
out.writeArray(hits);
out.writeOptional(Lucene::writeSortFieldArray, [MASK]);
out.writeOptionalString(collapseField);
out.writeOptionalArray(Lucene::writeSortValue, collapseValues);
}
@Nullable
public TotalHits getTotalHits() {
return totalHits;
}
public float getMaxScore() {
return maxScore;
}
public SearchHit[] getHits() {
assert hasReferences();
return this.hits;
}
public SearchHit getAt(int position) {
assert hasReferences();
return hits[position];
}
@Nullable
public SortField[] getSortFields() {
return [MASK];
}
@Nullable
public String getCollapseField() {
return collapseField;
}
@Nullable
public Object[] getCollapseValues() {
return collapseValues;
}
@Override
public Iterator<SearchHit> iterator() {
assert hasReferences();
return Iterators.forArray(getHits());
}
@Override
public void incRef() {
refCounted.incRef();
}
@Override
public boolean tryIncRef() {
return refCounted.tryIncRef();
}
@Override
public boolean decRef() {
if (refCounted.decRef()) {
deallocate();
return true;
}
return false;
}
private void deallocate() {
var hits = this.hits;
for (int i = 0; i < hits.length; i++) {
assert hits[i] != null;
hits[i].decRef();
hits[i] = null;
}
}
@Override
public boolean hasReferences() {
return refCounted.hasReferences();
}
public SearchHits asUnpooled() {
assert hasReferences();
if (refCounted == ALWAYS_REFERENCED) {
return this;
}
final SearchHit[] unpooledHits = new SearchHit[hits.length];
for (int i = 0; i < hits.length; i++) {
unpooledHits[i] = hits[i].asUnpooled();
}
return unpooled(unpooledHits, totalHits, maxScore, [MASK], collapseField, collapseValues);
}
public static final class Fields {
public static final String HITS = "hits";
public static final String TOTAL = "total";
public static final String MAX_SCORE = "max_score";
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
assert hasReferences();
return Iterators.concat(Iterators.single((b, p) -> {
b.startObject(Fields.HITS);
boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false);
if (totalHitAsInt) {
long total = totalHits == null ? -1 : totalHits.value();
b.field(Fields.TOTAL, total);
} else if (totalHits != null) {
b.startObject(Fields.TOTAL);
b.field("value", totalHits.value());
b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte");
b.endObject();
}
if (Float.isNaN(maxScore)) {
b.nullField(Fields.MAX_SCORE);
} else {
b.field(Fields.MAX_SCORE, maxScore);
}
return b;
}), ChunkedToXContentHelper.array(Fields.HITS, Iterators.forArray(hits)), ChunkedToXContentHelper.endObject());
}
@Override
public boolean equals(Object obj) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SearchHits other = (SearchHits) obj;
return Objects.equals(totalHits, other.totalHits)
&& Objects.equals(maxScore, other.maxScore)
&& Arrays.equals(hits, other.hits)
&& Arrays.equals([MASK], other.[MASK])
&& Objects.equals(collapseField, other.collapseField)
&& Arrays.equals(collapseValues, other.collapseValues);
}
@Override
public int hashCode() {
return Objects.hash(
totalHits,
maxScore,
Arrays.hashCode(hits),
Arrays.hashCode([MASK]),
collapseField,
Arrays.hashCode(collapseValues)
);
}
public static TotalHits parseTotalHitsFragment(XContentParser parser) throws IOException {
long value = -1;
Relation relation = null;
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("value".equals(currentFieldName)) {
value = parser.longValue();
} else if ("relation".equals(currentFieldName)) {
relation = parseRelation(parser.text());
}
} else {
parser.skipChildren();
}
}
return new TotalHits(value, relation);
}
private static Relation parseRelation(String relation) {
if ("gte".equals(relation)) {
return Relation.GREATER_THAN_OR_EQUAL_TO;
} else if ("eq".equals(relation)) {
return Relation.EQUAL_TO;
} else {
throw new IllegalArgumentException("invalid total hits relation: " + relation);
}
}
} | sortFields | java | elasticsearch |
package android.icu.text;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.math.BigInteger;
import java.text.FieldPosition;
import java.text.Format;
import java.text.ParseException;
import java.text.ParsePosition;
import java.util.Collections;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.Set;
import android.icu.impl.ICUData;
import android.icu.impl.ICUResourceBundle;
import android.icu.util.Currency;
import android.icu.util.Currency.CurrencyUsage;
import android.icu.util.CurrencyAmount;
import android.icu.util.ULocale;
import android.icu.util.ULocale.Category;
import android.icu.util.UResourceBundle;
public abstract class NumberFormat extends UFormat {
public static final int NUMBERSTYLE = 0;
public static final int CURRENCYSTYLE = 1;
public static final int PERCENTSTYLE = 2;
public static final int SCIENTIFICSTYLE = 3;
public static final int INTEGERSTYLE = 4;
public static final int ISOCURRENCYSTYLE = 5;
public static final int PLURALCURRENCYSTYLE = 6;
public static final int ACCOUNTINGCURRENCYSTYLE = 7;
public static final int CASHCURRENCYSTYLE = 8;
public static final int STANDARDCURRENCYSTYLE = 9;
public static final int INTEGER_FIELD = 0;
public static final int FRACTION_FIELD = 1;
@Override
public StringBuffer format(Object number,
StringBuffer toAppendTo,
FieldPosition pos) {
if (number instanceof Long) {
return format(((Long)number).longValue(), toAppendTo, pos);
} else if (number instanceof BigInteger) {
return format((BigInteger) number, toAppendTo, pos);
} else if (number instanceof java.math.BigDecimal) {
return format((java.math.BigDecimal) number, toAppendTo, pos);
} else if (number instanceof android.icu.math.BigDecimal) {
return format((android.icu.math.BigDecimal) number, toAppendTo, pos);
} else if (number instanceof CurrencyAmount) {
return format((CurrencyAmount)number, toAppendTo, pos);
} else if (number instanceof Number) {
return format(((Number)number).doubleValue(), toAppendTo, pos);
} else {
throw new IllegalArgumentException("Cannot format given Object as a Number");
}
}
@Override
public final Object parseObject(String source,
ParsePosition parsePosition) {
return parse(source, parsePosition);
}
public final String format(double number) {
return format(number,new StringBuffer(),
new FieldPosition(0)).toString();
}
public final String format(long number) {
StringBuffer buf = new StringBuffer(19);
FieldPosition pos = new FieldPosition(0);
format(number, buf, pos);
return buf.toString();
}
public final String format(BigInteger number) {
return format(number, new StringBuffer(),
new FieldPosition(0)).toString();
}
public final String format(java.math.BigDecimal number) {
return format(number, new StringBuffer(),
new FieldPosition(0)).toString();
}
public final String format(android.icu.math.BigDecimal number) {
return format(number, new StringBuffer(),
new FieldPosition(0)).toString();
}
public final String format(CurrencyAmount currAmt) {
return format(currAmt, new StringBuffer(),
new FieldPosition(0)).toString();
}
public abstract StringBuffer format(double number,
StringBuffer toAppendTo,
FieldPosition pos);
public abstract StringBuffer format(long number,
StringBuffer toAppendTo,
FieldPosition pos);
public abstract StringBuffer format(BigInteger number,
StringBuffer toAppendTo,
FieldPosition pos);
public abstract StringBuffer format(java.math.BigDecimal number,
StringBuffer toAppendTo,
FieldPosition pos);
public abstract StringBuffer format(android.icu.math.BigDecimal number,
StringBuffer toAppendTo,
FieldPosition pos);
public StringBuffer format(CurrencyAmount currAmt,
StringBuffer toAppendTo,
FieldPosition pos) {
synchronized(this) {
Currency save = getCurrency(), curr = currAmt.getCurrency();
boolean same = curr.equals(save);
if (!same) setCurrency(curr);
format(currAmt.getNumber(), toAppendTo, pos);
if (!same) setCurrency(save);
}
return toAppendTo;
}
public abstract Number parse(String text, ParsePosition parsePosition);
public Number parse(String text) throws ParseException {
ParsePosition parsePosition = new ParsePosition(0);
Number result = parse(text, parsePosition);
if (parsePosition.getIndex() == 0) {
throw new ParseException("Unparseable number: \"" + text + '"',
parsePosition.getErrorIndex());
}
return result;
}
public CurrencyAmount parseCurrency(CharSequence text, ParsePosition pos) {
Number n = parse(text.toString(), pos);
return n == null ? null : new CurrencyAmount(n, getEffectiveCurrency());
}
public boolean isParseIntegerOnly() {
return parseIntegerOnly;
}
public void setParseIntegerOnly(boolean value) {
parseIntegerOnly = value;
}
public void setParseStrict(boolean value) {
parseStrict = value;
}
public boolean isParseStrict() {
return parseStrict;
}
public void setContext(DisplayContext context) {
if (context.type() == DisplayContext.Type.CAPITALIZATION) {
capitalizationSetting = context;
}
}
public DisplayContext getContext(DisplayContext.Type type) {
return (type == DisplayContext.Type.CAPITALIZATION && capitalizationSetting != null)?
capitalizationSetting: DisplayContext.CAPITALIZATION_NONE;
}
public final static NumberFormat getInstance() {
return getInstance(ULocale.getDefault(Category.FORMAT), NUMBERSTYLE);
}
public static NumberFormat getInstance(Locale inLocale) {
return getInstance(ULocale.forLocale(inLocale), NUMBERSTYLE);
}
public static NumberFormat getInstance(ULocale inLocale) {
return getInstance(inLocale, NUMBERSTYLE);
}
public final static NumberFormat getInstance(int style) {
return getInstance(ULocale.getDefault(Category.FORMAT), style);
}
public static NumberFormat getInstance(Locale inLocale, int style) {
return getInstance(ULocale.forLocale(inLocale), style);
}
public final static NumberFormat getNumberInstance() {
return getInstance(ULocale.getDefault(Category.FORMAT), NUMBERSTYLE);
}
public static NumberFormat getNumberInstance(Locale inLocale) {
return getInstance(ULocale.forLocale(inLocale), NUMBERSTYLE);
}
public static NumberFormat getNumberInstance(ULocale inLocale) {
return getInstance(inLocale, NUMBERSTYLE);
}
public final static NumberFormat getIntegerInstance() {
return getInstance(ULocale.getDefault(Category.FORMAT), INTEGERSTYLE);
}
public static NumberFormat getIntegerInstance(Locale inLocale) {
return getInstance(ULocale.forLocale(inLocale), INTEGERSTYLE);
}
public static NumberFormat getIntegerInstance(ULocale inLocale) {
return getInstance(inLocale, INTEGERSTYLE);
}
public final static NumberFormat getCurrencyInstance() {
return getInstance(ULocale.getDefault(Category.FORMAT), CURRENCYSTYLE);
}
public static NumberFormat getCurrencyInstance(Locale inLocale) {
return getInstance(ULocale.forLocale(inLocale), CURRENCYSTYLE);
}
public static NumberFormat getCurrencyInstance(ULocale inLocale) {
return getInstance(inLocale, CURRENCYSTYLE);
}
public final static NumberFormat getPercentInstance() {
return getInstance(ULocale.getDefault(Category.FORMAT), PERCENTSTYLE);
}
public static NumberFormat getPercentInstance(Locale inLocale) {
return getInstance(ULocale.forLocale(inLocale), PERCENTSTYLE);
}
public static NumberFormat getPercentInstance(ULocale inLocale) {
return getInstance(inLocale, PERCENTSTYLE);
}
public final static NumberFormat getScientificInstance() {
return getInstance(ULocale.getDefault(Category.FORMAT), SCIENTIFICSTYLE);
}
public static NumberFormat getScientificInstance(Locale inLocale) {
return getInstance(ULocale.forLocale(inLocale), SCIENTIFICSTYLE);
}
public static NumberFormat getScientificInstance(ULocale inLocale) {
return getInstance(inLocale, SCIENTIFICSTYLE);
}
public static abstract class NumberFormatFactory {
public static final int FORMAT_NUMBER = NUMBERSTYLE;
public static final int FORMAT_CURRENCY = CURRENCYSTYLE;
public static final int FORMAT_PERCENT = PERCENTSTYLE;
public static final int FORMAT_SCIENTIFIC = SCIENTIFICSTYLE;
public static final int FORMAT_INTEGER = INTEGERSTYLE;
public boolean visible() {
return true;
}
public abstract Set<String> getSupportedLocaleNames();
public NumberFormat createFormat(ULocale loc, int formatType) {
return createFormat(loc.toLocale(), formatType);
}
public NumberFormat createFormat(Locale loc, int formatType) {
return createFormat(ULocale.forLocale(loc), formatType);
}
protected NumberFormatFactory() {
}
}
public static abstract class SimpleNumberFormatFactory extends NumberFormatFactory {
final Set<String> localeNames;
final boolean visible;
public SimpleNumberFormatFactory(Locale locale) {
this(locale, true);
}
public SimpleNumberFormatFactory(Locale locale, boolean visible) {
localeNames = Collections.singleton(ULocale.forLocale(locale).getBaseName());
this.visible = visible;
}
public SimpleNumberFormatFactory(ULocale locale) {
this(locale, true);
}
public SimpleNumberFormatFactory(ULocale locale, boolean visible) {
localeNames = Collections.singleton(locale.getBaseName());
this.visible = visible;
}
@Override
public final boolean visible() {
return visible;
}
@Override
public final Set<String> getSupportedLocaleNames() {
return localeNames;
}
}
static abstract class NumberFormatShim {
abstract Locale[] getAvailableLocales();
abstract ULocale[] getAvailableULocales();
abstract Object registerFactory(NumberFormatFactory f);
abstract boolean unregister(Object k);
abstract NumberFormat createInstance(ULocale l, int k);
}
private static NumberFormatShim shim;
private static NumberFormatShim getShim() {
if (shim == null) {
shim = new android.icu.text.NumberFormatServiceShim();
}
return shim;
}
public static Locale[] getAvailableLocales() {
if (shim == null) {
return ICUResourceBundle.getAvailableLocales();
}
return getShim().getAvailableLocales();
}
public static ULocale[] getAvailableULocales() {
if (shim == null) {
return ICUResourceBundle.getAvailableULocales();
}
return getShim().getAvailableULocales();
}
public static Object registerFactory(NumberFormatFactory factory) {
if (factory == null) {
throw new IllegalArgumentException("factory must not be null");
}
return getShim().registerFactory(factory);
}
public static boolean unregister(Object registryKey) {
if (registryKey == null) {
throw new IllegalArgumentException("registryKey must not be null");
}
if (shim == null) {
return false;
}
return shim.unregister(registryKey);
}
@Override
public int hashCode() {
return maximumIntegerDigits * 37 + maxFractionDigits;
}
@Override
public boolean equals(Object obj) {
if (obj == null) return false;
if (this == obj)
return true;
if (getClass() != obj.getClass())
return false;
NumberFormat other = (NumberFormat) obj;
return maximumIntegerDigits == other.maximumIntegerDigits
&& minimumIntegerDigits == other.minimumIntegerDigits
&& maximumFractionDigits == other.maximumFractionDigits
&& minimumFractionDigits == other.minimumFractionDigits
&& groupingUsed == other.groupingUsed
&& parseIntegerOnly == other.parseIntegerOnly
&& parseStrict == other.parseStrict
&& capitalizationSetting == other.capitalizationSetting;
}
@Override
public Object clone() {
NumberFormat other = (NumberFormat) super.clone();
return other;
}
public boolean isGroupingUsed() {
return groupingUsed;
}
public void setGroupingUsed(boolean newValue) {
groupingUsed = newValue;
}
public int getMaximumIntegerDigits() {
return maximumIntegerDigits;
}
public void setMaximumIntegerDigits(int newValue) {
maximumIntegerDigits = Math.max(0,newValue);
if (minimumIntegerDigits > maximumIntegerDigits)
minimumIntegerDigits = maximumIntegerDigits;
}
public int getMinimumIntegerDigits() {
return minimumIntegerDigits;
}
public void setMinimumIntegerDigits(int newValue) {
minimumIntegerDigits = Math.max(0,newValue);
if (minimumIntegerDigits > maximumIntegerDigits)
maximumIntegerDigits = minimumIntegerDigits;
}
public int getMaximumFractionDigits() {
return maximumFractionDigits;
}
public void setMaximumFractionDigits(int newValue) {
maximumFractionDigits = Math.max(0,newValue);
if (maximumFractionDigits < minimumFractionDigits)
minimumFractionDigits = maximumFractionDigits;
}
public int getMinimumFractionDigits() {
return minimumFractionDigits;
}
public void setMinimumFractionDigits(int newValue) {
minimumFractionDigits = Math.max(0,newValue);
if (maximumFractionDigits < minimumFractionDigits)
maximumFractionDigits = minimumFractionDigits;
}
public void setCurrency(Currency theCurrency) {
currency = theCurrency;
}
public Currency getCurrency() {
return currency;
}
@Deprecated
protected Currency getEffectiveCurrency() {
Currency c = getCurrency();
if (c == null) {
ULocale uloc = getLocale(ULocale.VALID_LOCALE);
if (uloc == null) {
uloc = ULocale.getDefault(Category.FORMAT);
}
c = Currency.getInstance(uloc);
}
return c;
}
public int getRoundingMode() {
throw new UnsupportedOperationException(
"getRoundingMode must be implemented by the subclass implementation.");
}
public void setRoundingMode(int roundingMode) {
throw new UnsupportedOperationException(
"setRoundingMode must be implemented by the subclass implementation.");
}
public static NumberFormat getInstance(ULocale desiredLocale, int choice) {
if (choice < NUMBERSTYLE || choice > STANDARDCURRENCYSTYLE) {
throw new IllegalArgumentException(
"choice should be from NUMBERSTYLE to STANDARDCURRENCYSTYLE");
}
return getShim().createInstance(desiredLocale, choice);
}
static NumberFormat createInstance(ULocale desiredLocale, int choice) {
String pattern = getPattern(desiredLocale, choice);
DecimalFormatSymbols symbols = new DecimalFormatSymbols(desiredLocale);
if (choice == CURRENCYSTYLE || choice == ISOCURRENCYSTYLE || choice == ACCOUNTINGCURRENCYSTYLE
|| choice == CASHCURRENCYSTYLE || choice == STANDARDCURRENCYSTYLE) {
String temp = symbols.getCurrencyPattern();
if(temp!=null){
pattern = temp;
}
}
if (choice == ISOCURRENCYSTYLE) {
pattern = pattern.replace("\u00A4", doubleCurrencyStr);
}
NumberingSystem ns = NumberingSystem.getInstance(desiredLocale);
if ( ns == null ) {
return null;
}
NumberFormat format;
if ( ns != null && ns.isAlgorithmic()) {
String nsDesc;
String nsRuleSetGroup;
String nsRuleSetName;
ULocale nsLoc;
int desiredRulesType = RuleBasedNumberFormat.NUMBERING_SYSTEM;
nsDesc = ns.getDescription();
int firstSlash = nsDesc.indexOf("/");
int lastSlash = nsDesc.lastIndexOf("/");
if ( lastSlash > firstSlash ) {
String nsLocID = nsDesc.substring(0,firstSlash);
nsRuleSetGroup = nsDesc.substring(firstSlash+1,lastSlash);
nsRuleSetName = nsDesc.substring(lastSlash+1);
nsLoc = new ULocale(nsLocID);
if ( nsRuleSetGroup.equals("SpelloutRules")) {
desiredRulesType = RuleBasedNumberFormat.SPELLOUT;
}
} else {
nsLoc = desiredLocale;
nsRuleSetName = nsDesc;
}
RuleBasedNumberFormat r = new RuleBasedNumberFormat(nsLoc,desiredRulesType);
r.setDefaultRuleSet(nsRuleSetName);
format = r;
} else {
DecimalFormat f = new DecimalFormat(pattern, symbols, choice);
if (choice == INTEGERSTYLE) {
f.setMaximumFractionDigits(0);
f.setDecimalSeparatorAlwaysShown(false);
f.setParseIntegerOnly(true);
}
if (choice == CASHCURRENCYSTYLE) {
f.setCurrencyUsage(CurrencyUsage.CASH);
}
format = f;
}
ULocale valid = symbols.getLocale(ULocale.VALID_LOCALE);
ULocale actual = symbols.getLocale(ULocale.ACTUAL_LOCALE);
format.setLocale(valid, actual);
return format;
}
@Deprecated
protected static String getPattern(Locale forLocale, int choice) {
return getPattern(ULocale.forLocale(forLocale), choice);
}
protected static String getPattern(ULocale forLocale, int choice) {
String patternKey = null;
switch (choice) {
case NUMBERSTYLE:
case INTEGERSTYLE:
patternKey = "decimalFormat";
break;
case CURRENCYSTYLE:
String cfKeyValue = forLocale.getKeywordValue("cf");
patternKey = (cfKeyValue != null && cfKeyValue.equals("account")) ?
"accountingFormat" : "currencyFormat";
break;
case CASHCURRENCYSTYLE:
case ISOCURRENCYSTYLE:
case PLURALCURRENCYSTYLE:
case STANDARDCURRENCYSTYLE:
patternKey = "currencyFormat";
break;
case PERCENTSTYLE:
patternKey = "percentFormat";
break;
case SCIENTIFICSTYLE:
patternKey = "scientificFormat";
break;
case ACCOUNTINGCURRENCYSTYLE:
patternKey = "accountingFormat";
break;
default:
assert false;
patternKey = "decimalFormat";
break;
}
ICUResourceBundle rb = (ICUResourceBundle)UResourceBundle.
getBundleInstance(ICUData.ICU_BASE_NAME, forLocale);
NumberingSystem ns = NumberingSystem.getInstance(forLocale);
String result = rb.findStringWithFallback(
"NumberElements/" + ns.getName() + "/patterns/" + patternKey);
if (result == null) {
result = rb.getStringWithFallback("NumberElements/latn/patterns/" + patternKey);
}
return result;
}
private void readObject(ObjectInputStream stream)
throws IOException, ClassNotFoundException
{
stream.defaultReadObject();
if (serialVersionOnStream < 1) {
maximumIntegerDigits = maxIntegerDigits;
minimumIntegerDigits = [MASK];
maximumFractionDigits = maxFractionDigits;
minimumFractionDigits = minFractionDigits;
}
if (serialVersionOnStream < 2) {
capitalizationSetting = DisplayContext.CAPITALIZATION_NONE;
}
if (minimumIntegerDigits > maximumIntegerDigits ||
minimumFractionDigits > maximumFractionDigits ||
minimumIntegerDigits < 0 || minimumFractionDigits < 0) {
throw new InvalidObjectException("Digit count range invalid");
}
serialVersionOnStream = currentSerialVersion;
}
private void writeObject(ObjectOutputStream stream)
throws IOException
{
maxIntegerDigits = (maximumIntegerDigits > Byte.MAX_VALUE) ? Byte.MAX_VALUE :
(byte)maximumIntegerDigits;
[MASK] = (minimumIntegerDigits > Byte.MAX_VALUE) ? Byte.MAX_VALUE :
(byte)minimumIntegerDigits;
maxFractionDigits = (maximumFractionDigits > Byte.MAX_VALUE) ? Byte.MAX_VALUE :
(byte)maximumFractionDigits;
minFractionDigits = (minimumFractionDigits > Byte.MAX_VALUE) ? Byte.MAX_VALUE :
(byte)minimumFractionDigits;
stream.defaultWriteObject();
}
private static final char[] doubleCurrencySign = {0xA4, 0xA4};
private static final String doubleCurrencyStr = new String(doubleCurrencySign);
private boolean groupingUsed = true;
private byte maxIntegerDigits = 40;
private byte [MASK] = 1;
private byte maxFractionDigits = 3;
private byte minFractionDigits = 0;
private boolean parseIntegerOnly = false;
private int maximumIntegerDigits = 40;
private int minimumIntegerDigits = 1;
private int maximumFractionDigits = 3;
private int minimumFractionDigits = 0;
private Currency currency;
static final int currentSerialVersion = 2;
private int serialVersionOnStream = currentSerialVersion;
private static final long serialVersionUID = -2308460125733713944L;
public NumberFormat() {
}
private boolean parseStrict;
private DisplayContext capitalizationSetting = DisplayContext.CAPITALIZATION_NONE;
public static class Field extends Format.Field {
static final long serialVersionUID = -4516273749929385842L;
public static final Field SIGN = new Field("sign");
public static final Field INTEGER = new Field("integer");
public static final Field FRACTION = new Field("fraction");
public static final Field EXPONENT = new Field("exponent");
public static final Field EXPONENT_SIGN = new Field("exponent sign");
public static final Field EXPONENT_SYMBOL = new Field("exponent symbol");
public static final Field DECIMAL_SEPARATOR = new Field("decimal separator");
public static final Field GROUPING_SEPARATOR = new Field("grouping separator");
public static final Field PERCENT = new Field("percent");
public static final Field PERMILLE = new Field("per mille");
public static final Field CURRENCY = new Field("currency");
protected Field(String fieldName) {
super(fieldName);
}
@Override
protected Object readResolve() throws InvalidObjectException {
if (this.getName().equals(INTEGER.getName()))
return INTEGER;
if (this.getName().equals(FRACTION.getName()))
return FRACTION;
if (this.getName().equals(EXPONENT.getName()))
return EXPONENT;
if (this.getName().equals(EXPONENT_SIGN.getName()))
return EXPONENT_SIGN;
if (this.getName().equals(EXPONENT_SYMBOL.getName()))
return EXPONENT_SYMBOL;
if (this.getName().equals(CURRENCY.getName()))
return CURRENCY;
if (this.getName().equals(DECIMAL_SEPARATOR.getName()))
return DECIMAL_SEPARATOR;
if (this.getName().equals(GROUPING_SEPARATOR.getName()))
return GROUPING_SEPARATOR;
if (this.getName().equals(PERCENT.getName()))
return PERCENT;
if (this.getName().equals(PERMILLE.getName()))
return PERMILLE;
if (this.getName().equals(SIGN.getName()))
return SIGN;
throw new InvalidObjectException("An invalid object.");
}
}
} | minIntegerDigits | java | j2objc |
package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.hash.MurmurHash3;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BitArray;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiConsumer;
public class GlobalOrdCardinalityAggregator extends NumericMetricsAggregator.SingleValue {
private static final int MAX_FIELD_CARDINALITY_FOR_DYNAMIC_PRUNING = 1024;
private static final int MAX_TERMS_FOR_DYNAMIC_PRUNING = 128;
private final ValuesSource.Bytes.WithOrdinals valuesSource;
private final String field;
private final BigArrays bigArrays;
private final int maxOrd;
private final int precision;
private int dynamicPruningAttempts;
private int dynamicPruningSuccess;
private int bruteForce;
private int [MASK];
@Nullable
private HyperLogLogPlusPlus counts;
private ObjectArray<BitArray> visitedOrds;
private SortedSetDocValues values;
public GlobalOrdCardinalityAggregator(
String name,
ValuesSource.Bytes.WithOrdinals valuesSource,
String field,
int precision,
int maxOrd,
AggregationContext context,
Aggregator parent,
Map<String, Object> metadata
) throws IOException {
super(name, context, parent, metadata);
this.valuesSource = valuesSource;
this.field = field;
this.precision = precision;
this.maxOrd = maxOrd;
this.bigArrays = context.bigArrays();
this.visitedOrds = bigArrays.newObjectArray(1);
}
@Override
public ScoreMode scoreMode() {
if (this.parent == null
&& field != null
&& valuesSource.needsScores() == false
&& maxOrd <= MAX_FIELD_CARDINALITY_FOR_DYNAMIC_PRUNING) {
return ScoreMode.TOP_DOCS;
} else if (valuesSource.needsScores()) {
return ScoreMode.COMPLETE;
} else {
return ScoreMode.COMPLETE_NO_SCORES;
}
}
private class CompetitiveIterator extends DocIdSetIterator {
private final BitArray visitedOrds;
private long numNonVisitedOrds;
private final TermsEnum indexTerms;
private final DocIdSetIterator docsWithField;
CompetitiveIterator(int numNonVisitedOrds, BitArray visitedOrds, Terms indexTerms, DocIdSetIterator docsWithField)
throws IOException {
this.visitedOrds = visitedOrds;
this.numNonVisitedOrds = numNonVisitedOrds;
this.indexTerms = Objects.requireNonNull(indexTerms).iterator();
this.docsWithField = docsWithField;
}
private Map<Long, PostingsEnum> nonVisitedOrds;
private PriorityQueue<PostingsEnum> nonVisitedPostings;
private int doc = -1;
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int advance(int target) throws IOException {
if (nonVisitedPostings == null) {
return doc = docsWithField.advance(target);
} else if (nonVisitedPostings.size() == 0) {
return doc = DocIdSetIterator.NO_MORE_DOCS;
} else {
PostingsEnum top = nonVisitedPostings.top();
while (top.docID() < target) {
top.advance(target);
top = nonVisitedPostings.updateTop();
}
return doc = top.docID();
}
}
@Override
public long cost() {
return docsWithField.cost();
}
void startPruning() throws IOException {
dynamicPruningSuccess++;
nonVisitedOrds = new HashMap<>();
for (long ord = 0; ord < maxOrd; ++ord) {
if (visitedOrds.get(ord)) {
continue;
}
BytesRef term = values.lookupOrd(ord);
if (indexTerms.seekExact(term) == false) {
continue;
}
nonVisitedOrds.put(ord, indexTerms.postings(null, PostingsEnum.NONE));
}
nonVisitedPostings = new PriorityQueue<>(nonVisitedOrds.size()) {
@Override
protected boolean lessThan(PostingsEnum a, PostingsEnum b) {
return a.docID() < b.docID();
}
};
for (PostingsEnum pe : nonVisitedOrds.values()) {
nonVisitedPostings.add(pe);
}
}
void onVisitedOrdinal(long ordinal) throws IOException {
numNonVisitedOrds--;
if (nonVisitedOrds == null) {
if (numNonVisitedOrds <= MAX_TERMS_FOR_DYNAMIC_PRUNING) {
startPruning();
}
} else {
if (nonVisitedOrds.remove(ordinal) != null) {
nonVisitedPostings.clear();
for (PostingsEnum pe : nonVisitedOrds.values()) {
nonVisitedPostings.add(pe);
}
}
}
}
}
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, final LeafBucketCollector sub) throws IOException {
values = valuesSource.globalOrdinalsValues(aggCtx.getLeafReaderContext());
final SortedDocValues singleton = DocValues.unwrapSingleton(values);
if (parent == null && field != null) {
final Terms indexTerms = aggCtx.getLeafReaderContext().reader().terms(field);
if (indexTerms != null) {
visitedOrds = bigArrays.grow(visitedOrds, 1);
final int numNonVisitedOrds;
{
final BitArray bits = visitedOrds.get(0);
numNonVisitedOrds = maxOrd - (bits == null ? 0 : (int) bits.cardinality());
}
if (maxOrd <= MAX_FIELD_CARDINALITY_FOR_DYNAMIC_PRUNING || numNonVisitedOrds <= MAX_TERMS_FOR_DYNAMIC_PRUNING) {
dynamicPruningAttempts++;
final BitArray bits = getNewOrExistingBitArray(0L);
final CompetitiveIterator competitiveIterator;
{
final DocIdSetIterator docsWithField = valuesSource.ordinalsValues(aggCtx.getLeafReaderContext());
competitiveIterator = new CompetitiveIterator(numNonVisitedOrds, bits, indexTerms, docsWithField);
if (numNonVisitedOrds <= MAX_TERMS_FOR_DYNAMIC_PRUNING) {
competitiveIterator.startPruning();
}
}
if (singleton != null) {
return new LeafBucketCollector() {
final SortedDocValues docValues = singleton;
@Override
public void collect(int doc, long bucketOrd) throws IOException {
if (docValues.advanceExact(doc)) {
final int ord = docValues.ordValue();
if (bits.getAndSet(ord) == false) {
competitiveIterator.onVisitedOrdinal(ord);
}
}
}
@Override
public CompetitiveIterator competitiveIterator() {
return competitiveIterator;
}
};
} else {
return new LeafBucketCollector() {
final SortedSetDocValues docValues = values;
@Override
public void collect(int doc, long bucketOrd) throws IOException {
if (docValues.advanceExact(doc)) {
for (int i = 0; i < docValues.docValueCount(); i++) {
long ord = docValues.nextOrd();
if (bits.getAndSet(ord) == false) {
competitiveIterator.onVisitedOrdinal(ord);
}
}
}
}
@Override
public CompetitiveIterator competitiveIterator() {
return competitiveIterator;
}
};
}
}
} else {
final FieldInfo fi = aggCtx.getLeafReaderContext().reader().getFieldInfos().fieldInfo(field);
if (fi != null && fi.getIndexOptions() != IndexOptions.NONE) {
[MASK]++;
return LeafBucketCollector.NO_OP_COLLECTOR;
}
}
}
bruteForce++;
if (singleton != null) {
return new LeafBucketCollector() {
final SortedDocValues docValues = singleton;
@Override
public void collect(int doc, long bucketOrd) throws IOException {
if (docValues.advanceExact(doc)) {
final BitArray bits = getNewOrExistingBitArray(bucketOrd);
bits.set(docValues.ordValue());
}
}
};
} else {
return new LeafBucketCollector() {
final SortedSetDocValues docValues = values;
@Override
public void collect(int doc, long bucketOrd) throws IOException {
if (docValues.advanceExact(doc)) {
final BitArray bits = getNewOrExistingBitArray(bucketOrd);
for (int i = 0; i < docValues.docValueCount(); i++) {
long ord = docValues.nextOrd();
bits.set((int) ord);
}
}
}
};
}
}
private BitArray getNewOrExistingBitArray(long bucketOrd) {
visitedOrds = bigArrays.grow(visitedOrds, bucketOrd + 1);
BitArray bits = visitedOrds.get(bucketOrd);
if (bits == null) {
bits = new BitArray(maxOrd, bigArrays);
visitedOrds.set(bucketOrd, bits);
}
return bits;
}
protected void doPostCollection() throws IOException {
counts = new HyperLogLogPlusPlus(precision, bigArrays, visitedOrds.size());
try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) {
try (BitArray allVisitedOrds = new BitArray(maxOrd, bigArrays)) {
for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
final BitArray bits = visitedOrds.get(bucket);
if (bits != null) {
allVisitedOrds.or(bits);
}
}
final MurmurHash3.Hash128 hash = new MurmurHash3.Hash128();
for (long ord = allVisitedOrds.nextSetBit(0); ord < Long.MAX_VALUE; ord = ord + 1 < maxOrd
? allVisitedOrds.nextSetBit(ord + 1)
: Long.MAX_VALUE) {
final BytesRef value = values.lookupOrd(ord);
MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
hashes.set(ord, hash.h1);
}
}
for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
try (BitArray bits = visitedOrds.get(bucket)) {
if (bits != null) {
visitedOrds.set(bucket, null);
for (long ord = bits.nextSetBit(0); ord < Long.MAX_VALUE; ord = ord + 1 < maxOrd
? bits.nextSetBit(ord + 1)
: Long.MAX_VALUE) {
counts.collect(bucket, hashes.get(ord));
}
}
}
}
Releasables.close(visitedOrds);
visitedOrds = null;
}
}
@Override
public double metric(long owningBucketOrd) {
return counts.cardinality(owningBucketOrd);
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
if (counts == null || owningBucketOrdinal >= counts.maxOrd() || counts.cardinality(owningBucketOrdinal) == 0) {
return buildEmptyAggregation();
}
AbstractHyperLogLogPlusPlus copy = counts.clone(owningBucketOrdinal, BigArrays.NON_RECYCLING_INSTANCE);
return new InternalCardinality(name, copy, metadata());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalCardinality(name, null, metadata());
}
@Override
protected void doClose() {
if (visitedOrds != null) {
for (int i = 0; i < visitedOrds.size(); i++) {
Releasables.close(visitedOrds.get(i));
}
}
Releasables.close(visitedOrds, counts);
}
@Override
public void collectDebugInfo(BiConsumer<String, Object> add) {
super.collectDebugInfo(add);
add.accept("dynamic_pruning_attempted", dynamicPruningAttempts);
add.accept("dynamic_pruning_used", dynamicPruningSuccess);
add.accept("brute_force_used", bruteForce);
add.accept("skipped_due_to_no_data", [MASK]);
}
} | noData | java | elasticsearch |
package io.reactivex.rxjava3.internal.operators.[MASK];
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import org.junit.Test;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.testsupport.TestHelper;
public class ObservableDefaultIfEmptyTest extends RxJavaTest {
@Test
public void defaultIfEmpty() {
Observable<Integer> source = Observable.just(1, 2, 3);
Observable<Integer> [MASK] = source.defaultIfEmpty(10);
Observer<Integer> observer = TestHelper.mockObserver();
[MASK].subscribe(observer);
verify(observer, never()).onNext(10);
verify(observer).onNext(1);
verify(observer).onNext(2);
verify(observer).onNext(3);
verify(observer).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
@Test
public void defaultIfEmptyWithEmpty() {
Observable<Integer> source = Observable.empty();
Observable<Integer> [MASK] = source.defaultIfEmpty(10);
Observer<Integer> observer = TestHelper.mockObserver();
[MASK].subscribe(observer);
verify(observer).onNext(10);
verify(observer).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
} | observable | java | RxJava |
package org.springframework.boot.configurationprocessor.fieldvalues.javac;
import java.lang.reflect.Method;
class ReflectionWrapper {
private final Class<?> type;
private final Object [MASK];
ReflectionWrapper(String type, Object [MASK]) {
this.type = findClass([MASK].getClass().getClassLoader(), type);
this.[MASK] = this.type.cast([MASK]);
}
protected final Object getInstance() {
return this.[MASK];
}
@Override
public String toString() {
return this.[MASK].toString();
}
protected Class<?> findClass(String name) {
return findClass(getInstance().getClass().getClassLoader(), name);
}
protected Method findMethod(String name, Class<?>... parameterTypes) {
return findMethod(this.type, name, parameterTypes);
}
protected static Class<?> findClass(ClassLoader classLoader, String name) {
try {
return Class.forName(name, false, classLoader);
}
catch (ClassNotFoundException ex) {
throw new IllegalStateException(ex);
}
}
protected static Method findMethod(Class<?> type, String name, Class<?>... parameterTypes) {
try {
return type.getMethod(name, parameterTypes);
}
catch (Exception ex) {
throw new IllegalStateException(ex);
}
}
} | instance | java | spring-boot |
package com.google.devtools.build.lib.skyframe;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.stream.Collectors.groupingByConcurrent;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.flogger.GoogleLogger;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.FileStateType;
import com.google.devtools.build.lib.actions.OutputChecker;
import com.google.devtools.build.lib.concurrent.ExecutorUtil;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.profiler.AutoProfiler;
import com.google.devtools.build.lib.profiler.AutoProfiler.ElapsedTimeReceiver;
import com.google.devtools.build.lib.profiler.Profiler;
import com.google.devtools.build.lib.profiler.SilentCloseable;
import com.google.devtools.build.lib.skyframe.SkyValueDirtinessChecker.DirtyResult;
import com.google.devtools.build.lib.skyframe.TreeArtifactValue.ArchivedRepresentation;
import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor;
import com.google.devtools.build.lib.vfs.BatchStat;
import com.google.devtools.build.lib.vfs.Dirent;
import com.google.devtools.build.lib.vfs.FileStatusWithDigest;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Symlinks;
import com.google.devtools.build.lib.vfs.SyscallCache;
import com.google.devtools.build.lib.vfs.XattrProvider;
import com.google.devtools.build.skyframe.Differencer;
import com.google.devtools.build.skyframe.Differencer.DiffWithDelta.Delta;
import com.google.devtools.build.skyframe.FunctionHermeticity;
import com.google.devtools.build.skyframe.InMemoryGraph;
import com.google.devtools.build.skyframe.QueryableGraph.Reason;
import com.google.devtools.build.skyframe.SkyFunctionName;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.Version;
import com.google.devtools.build.skyframe.WalkableGraph;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nullable;
public class FilesystemValueChecker {
public interface XattrProviderOverrider {
XattrProvider getXattrProvider(SyscallCache syscallCache);
XattrProviderOverrider NO_OVERRIDE = syscallCache -> syscallCache;
}
private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
private static final Predicate<SkyKey> ACTION_FILTER =
SkyFunctionName.functionIs(SkyFunctions.ACTION_EXECUTION);
@Nullable private final TimestampGranularityMonitor tsgm;
private final SyscallCache syscallCache;
private final XattrProviderOverrider xattrProviderOverrider;
private final int numThreads;
public FilesystemValueChecker(
@Nullable TimestampGranularityMonitor tsgm,
SyscallCache syscallCache,
XattrProviderOverrider xattrProviderOverrider,
int numThreads) {
this.tsgm = tsgm;
this.syscallCache = syscallCache;
this.xattrProviderOverrider = xattrProviderOverrider;
this.numThreads = numThreads;
}
public ImmutableBatchDirtyResult getDirtyKeys(
Map<SkyKey, SkyValue> valuesMap, SkyValueDirtinessChecker dirtinessChecker)
throws InterruptedException {
return getDirtyValues(
new MapBackedValueFetcher(valuesMap),
valuesMap.keySet(),
dirtinessChecker,
false,
null);
}
public ImmutableBatchDirtyResult getDirtyKeys(
InMemoryGraph inMemoryGraph, SkyValueDirtinessChecker dirtinessChecker)
throws InterruptedException {
Map<SkyKey, SkyValue> valuesMap = inMemoryGraph.getValues();
return getDirtyValues(
new MapBackedValueFetcher(valuesMap),
valuesMap.keySet(),
dirtinessChecker,
false,
inMemoryGraph);
}
public Differencer.DiffWithDelta getNewAndOldValues(
WalkableGraph walkableGraph,
Collection<SkyKey> keys,
SkyValueDirtinessChecker dirtinessChecker)
throws InterruptedException {
return getDirtyValues(
new WalkableGraphBackedValueFetcher(walkableGraph),
keys,
dirtinessChecker,
true,
null);
}
private interface ValueFetcher {
@Nullable
SkyValue get(SkyKey key) throws InterruptedException;
}
private static class WalkableGraphBackedValueFetcher implements ValueFetcher {
private final WalkableGraph walkableGraph;
private WalkableGraphBackedValueFetcher(WalkableGraph walkableGraph) {
this.walkableGraph = walkableGraph;
}
@Override
@Nullable
public SkyValue get(SkyKey key) throws InterruptedException {
return walkableGraph.getValue(key);
}
}
private static class MapBackedValueFetcher implements ValueFetcher {
private final Map<SkyKey, SkyValue> valuesMap;
private MapBackedValueFetcher(Map<SkyKey, SkyValue> valuesMap) {
this.valuesMap = valuesMap;
}
@Override
@Nullable
public SkyValue get(SkyKey key) {
return valuesMap.get(key);
}
}
@FunctionalInterface
@ThreadSafe
interface ModifiedOutputsReceiver {
void reportModifiedOutputFile(long maybeModifiedTime, Artifact artifact);
}
Collection<SkyKey> getDirtyActionValues(
Map<SkyKey, SkyValue> valuesMap,
@Nullable final BatchStat batchStatter,
ModifiedFileSet modifiedOutputFiles,
OutputChecker outputChecker,
ModifiedOutputsReceiver modifiedOutputsReceiver)
throws InterruptedException {
if (modifiedOutputFiles == ModifiedFileSet.NOTHING_MODIFIED) {
logger.atInfo().log("Not checking for dirty actions since nothing was modified");
return ImmutableList.of();
}
logger.atInfo().log("Accumulating dirty actions and batching them into shards");
int numShards = Runtime.getRuntime().availableProcessors() * 4;
Collection<List<Map.Entry<SkyKey, ActionExecutionValue>>> actionKeyShards;
try (SilentCloseable c =
Profiler.instance().profile("getDirtyActionValues/filterAndBatchActions")) {
actionKeyShards = batchActionKeysIntoShards(numShards, valuesMap);
}
ExecutorService executor =
Executors.newFixedThreadPool(
numShards,
new ThreadFactoryBuilder()
.setNameFormat("FileSystem Output File Invalidator %d")
.build());
Collection<SkyKey> dirtyKeys = Sets.newConcurrentHashSet();
final ImmutableSet<PathFragment> knownModifiedOutputFiles =
modifiedOutputFiles.treatEverythingAsModified()
? null
: modifiedOutputFiles.modifiedSourceFiles();
Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles =
Suppliers.memoize(
new Supplier<NavigableSet<PathFragment>>() {
@Nullable
@Override
public NavigableSet<PathFragment> get() {
if (knownModifiedOutputFiles == null) {
return null;
} else {
return ImmutableSortedSet.copyOf(knownModifiedOutputFiles);
}
}
});
boolean interrupted;
try (SilentCloseable c = Profiler.instance().profile("getDirtyActionValues/statFiles")) {
for (List<Map.Entry<SkyKey, ActionExecutionValue>> shard : actionKeyShards) {
Runnable job =
(batchStatter == null)
? outputStatJob(
dirtyKeys,
shard,
knownModifiedOutputFiles,
sortedKnownModifiedOutputFiles,
outputChecker,
modifiedOutputsReceiver)
: batchStatJob(
dirtyKeys,
shard,
batchStatter,
knownModifiedOutputFiles,
sortedKnownModifiedOutputFiles,
outputChecker,
modifiedOutputsReceiver);
executor.execute(job);
}
interrupted = ExecutorUtil.interruptibleShutdown(executor);
}
if (dirtyKeys.isEmpty()) {
logger.atInfo().log("Completed output file stat checks, no modified outputs found");
} else {
logger.atInfo().log(
"Completed output file stat checks, %d actions' outputs changed, first few: %s",
dirtyKeys.size(), Iterables.limit(dirtyKeys, 10));
}
if (interrupted) {
throw new InterruptedException();
}
return dirtyKeys;
}
@SuppressWarnings("unchecked")
private Collection<List<Map.Entry<SkyKey, ActionExecutionValue>>> batchActionKeysIntoShards(
int numShards, Map<SkyKey, SkyValue> valuesMap) {
return (Collection)
valuesMap.entrySet().stream()
.parallel()
.filter(e -> ACTION_FILTER.apply(e.getKey()))
.map(e -> (Map.Entry<?, ?>) e)
.collect(groupingByConcurrent(k -> ThreadLocalRandom.current().nextInt(numShards)))
.values();
}
private Runnable batchStatJob(
Collection<SkyKey> dirtyKeys,
List<Map.Entry<SkyKey, ActionExecutionValue>> shard,
BatchStat batchStatter,
ImmutableSet<PathFragment> knownModifiedOutputFiles,
Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles,
OutputChecker outputChecker,
ModifiedOutputsReceiver modifiedOutputsReceiver) {
return () -> {
Map<Artifact, Map.Entry<SkyKey, ActionExecutionValue>> fileToKeyAndValue = new HashMap<>();
Map<Artifact, Map.Entry<SkyKey, ActionExecutionValue>> treeArtifactsToKeyAndValue =
new HashMap<>();
for (Map.Entry<SkyKey, ActionExecutionValue> keyAndValue : shard) {
ActionExecutionValue actionValue = keyAndValue.getValue();
if (actionValue == null) {
dirtyKeys.add(keyAndValue.getKey());
} else {
for (Artifact artifact : actionValue.getAllFileValues().keySet()) {
if (!artifact.isRunfilesTree() && shouldCheckFile(knownModifiedOutputFiles, artifact)) {
fileToKeyAndValue.put(artifact, keyAndValue);
}
}
for (Map.Entry<Artifact, TreeArtifactValue> entry :
actionValue.getAllTreeArtifactValues().entrySet()) {
Artifact treeArtifact = entry.getKey();
TreeArtifactValue tree = entry.getValue();
for (TreeFileArtifact child : tree.getChildren()) {
if (shouldCheckFile(knownModifiedOutputFiles, child)) {
fileToKeyAndValue.put(child, keyAndValue);
}
}
tree.getArchivedRepresentation()
.map(ArchivedRepresentation::archivedTreeFileArtifact)
.filter(
archivedTreeArtifact ->
shouldCheckFile(knownModifiedOutputFiles, archivedTreeArtifact))
.ifPresent(
archivedTreeArtifact ->
fileToKeyAndValue.put(archivedTreeArtifact, keyAndValue));
if (shouldCheckTreeArtifact(sortedKnownModifiedOutputFiles.get(), treeArtifact)) {
treeArtifactsToKeyAndValue.put(treeArtifact, keyAndValue);
}
}
}
}
List<Artifact> artifacts = ImmutableList.copyOf(fileToKeyAndValue.keySet());
List<FileStatusWithDigest> [MASK];
try {
[MASK] = batchStatter.batchStat(Artifact.asPathFragments(artifacts));
} catch (IOException e) {
logger.atWarning().withCause(e).log(
"Unable to process batch stat, falling back to individual [MASK]");
outputStatJob(
dirtyKeys,
shard,
knownModifiedOutputFiles,
sortedKnownModifiedOutputFiles,
outputChecker,
modifiedOutputsReceiver)
.run();
return;
} catch (InterruptedException e) {
logger.atInfo().log("Interrupted doing batch stat");
Thread.currentThread().interrupt();
return;
}
Preconditions.checkState(
artifacts.size() == [MASK].size(),
"artifacts.size() == %s [MASK].size() == %s",
artifacts.size(),
[MASK].size());
for (int i = 0; i < artifacts.size(); i++) {
Artifact artifact = artifacts.get(i);
FileStatusWithDigest stat = [MASK].get(i);
Map.Entry<SkyKey, ActionExecutionValue> keyAndValue = fileToKeyAndValue.get(artifact);
ActionExecutionValue actionValue = keyAndValue.getValue();
SkyKey key = keyAndValue.getKey();
FileArtifactValue lastKnownData = actionValue.getExistingFileArtifactValue(artifact);
try {
FileArtifactValue newData =
ActionOutputMetadataStore.fileArtifactValueFromArtifact(
artifact, stat, xattrProviderOverrider.getXattrProvider(syscallCache), tsgm);
if (newData.couldBeModifiedSince(lastKnownData)) {
modifiedOutputsReceiver.reportModifiedOutputFile(
stat != null ? stat.getLastChangeTime() : -1, artifact);
dirtyKeys.add(key);
}
} catch (IOException e) {
logger.atWarning().withCause(e).log(
"Error for %s (%s %s %s)", artifact, stat, keyAndValue, lastKnownData);
modifiedOutputsReceiver.reportModifiedOutputFile(-1, artifact);
dirtyKeys.add(key);
}
}
for (Map.Entry<Artifact, Map.Entry<SkyKey, ActionExecutionValue>> entry :
treeArtifactsToKeyAndValue.entrySet()) {
Artifact artifact = entry.getKey();
try {
if (treeArtifactIsDirty(
entry.getKey(), entry.getValue().getValue().getTreeArtifactValue(artifact))) {
modifiedOutputsReceiver.reportModifiedOutputFile(
getBestEffortModifiedTime(artifact.getPath()), artifact);
dirtyKeys.add(entry.getValue().getKey());
}
} catch (InterruptedException e) {
logger.atInfo().log("Interrupted doing batch stat");
Thread.currentThread().interrupt();
return;
}
}
};
}
private Runnable outputStatJob(
Collection<SkyKey> dirtyKeys,
List<Map.Entry<SkyKey, ActionExecutionValue>> shard,
ImmutableSet<PathFragment> knownModifiedOutputFiles,
Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles,
OutputChecker outputChecker,
ModifiedOutputsReceiver modifiedOutputsReceiver) {
return new Runnable() {
@Override
public void run() {
try {
for (Map.Entry<SkyKey, ActionExecutionValue> keyAndValue : shard) {
ActionExecutionValue value = keyAndValue.getValue();
if (value == null
|| actionValueIsDirtyWithDirectSystemCalls(
value,
knownModifiedOutputFiles,
sortedKnownModifiedOutputFiles,
outputChecker,
modifiedOutputsReceiver)) {
dirtyKeys.add(keyAndValue.getKey());
}
}
} catch (InterruptedException e) {
logger.atInfo().log("Interrupted doing non-batch stat");
Thread.currentThread().interrupt();
}
}
};
}
private boolean treeArtifactIsDirty(Artifact artifact, TreeArtifactValue value)
throws InterruptedException {
Path path = artifact.getPath();
if (path.isSymbolicLink()) {
return true;
}
Set<PathFragment> currentLocalChildren = Sets.newConcurrentHashSet();
try {
TreeArtifactValue.visitTree(
path,
(child, type, traversedSymlink) -> {
if (type != Dirent.Type.DIRECTORY) {
currentLocalChildren.add(child);
}
});
} catch (IOException e) {
return true;
}
if (currentLocalChildren.isEmpty() && value.isEntirelyRemote()) {
return false;
}
var lastKnownLocalChildren =
value.getChildValues().entrySet().stream()
.filter(
entry -> {
var metadata = entry.getValue();
return !metadata.isRemote() || metadata.getContentsProxy() != null;
})
.map(entry -> entry.getKey().getParentRelativePath())
.collect(toImmutableSet());
return !currentLocalChildren.equals(lastKnownLocalChildren);
}
private boolean artifactIsDirtyWithDirectSystemCalls(
ImmutableSet<PathFragment> knownModifiedOutputFiles,
OutputChecker outputChecker,
Map.Entry<? extends Artifact, FileArtifactValue> entry,
ModifiedOutputsReceiver modifiedOutputsReceiver) {
Artifact file = entry.getKey();
FileArtifactValue lastKnownData = entry.getValue();
if (file.isRunfilesTree() || !shouldCheckFile(knownModifiedOutputFiles, file)) {
return false;
}
try {
FileArtifactValue fileMetadata =
ActionOutputMetadataStore.fileArtifactValueFromArtifact(
file, null, xattrProviderOverrider.getXattrProvider(syscallCache), tsgm);
boolean isTrustedRemoteValue =
fileMetadata.getType() == FileStateType.NONEXISTENT
&& lastKnownData.isRemote()
&& outputChecker.shouldTrustMetadata(file, lastKnownData);
if (!isTrustedRemoteValue && fileMetadata.couldBeModifiedSince(lastKnownData)) {
modifiedOutputsReceiver.reportModifiedOutputFile(
fileMetadata.getType() != FileStateType.NONEXISTENT
? file.getPath().getLastModifiedTime(Symlinks.FOLLOW)
: -1,
file);
return true;
}
return false;
} catch (IOException e) {
modifiedOutputsReceiver.reportModifiedOutputFile( -1, file);
return true;
}
}
private boolean actionValueIsDirtyWithDirectSystemCalls(
ActionExecutionValue actionValue,
ImmutableSet<PathFragment> knownModifiedOutputFiles,
Supplier<NavigableSet<PathFragment>> sortedKnownModifiedOutputFiles,
OutputChecker outputChecker,
ModifiedOutputsReceiver modifiedOutputsReceiver)
throws InterruptedException {
boolean isDirty = false;
for (Map.Entry<Artifact, FileArtifactValue> entry : actionValue.getAllFileValues().entrySet()) {
if (artifactIsDirtyWithDirectSystemCalls(
knownModifiedOutputFiles, outputChecker, entry, modifiedOutputsReceiver)) {
isDirty = true;
}
}
for (Map.Entry<Artifact, TreeArtifactValue> entry :
actionValue.getAllTreeArtifactValues().entrySet()) {
TreeArtifactValue tree = entry.getValue();
for (Map.Entry<TreeFileArtifact, FileArtifactValue> childEntry :
tree.getChildValues().entrySet()) {
if (artifactIsDirtyWithDirectSystemCalls(
knownModifiedOutputFiles, outputChecker, childEntry, modifiedOutputsReceiver)) {
isDirty = true;
}
}
isDirty =
isDirty
|| tree.getArchivedRepresentation()
.map(
archivedRepresentation ->
artifactIsDirtyWithDirectSystemCalls(
knownModifiedOutputFiles,
outputChecker,
Maps.immutableEntry(
archivedRepresentation.archivedTreeFileArtifact(),
archivedRepresentation.archivedFileValue()),
modifiedOutputsReceiver))
.orElse(false);
Artifact treeArtifact = entry.getKey();
if (shouldCheckTreeArtifact(sortedKnownModifiedOutputFiles.get(), treeArtifact)
&& treeArtifactIsDirty(treeArtifact, entry.getValue())) {
modifiedOutputsReceiver.reportModifiedOutputFile(
getBestEffortModifiedTime(treeArtifact.getPath()), treeArtifact);
isDirty = true;
}
}
return isDirty;
}
private static long getBestEffortModifiedTime(Path path) {
try {
return path.exists() ? path.getLastModifiedTime() : -1;
} catch (IOException e) {
logger.atWarning().atMostEvery(1, MINUTES).withCause(e).log(
"Failed to get modified time for output at: %s", path);
return -1;
}
}
private static boolean shouldCheckFile(
ImmutableSet<PathFragment> knownModifiedOutputFiles, Artifact artifact) {
return knownModifiedOutputFiles == null
|| knownModifiedOutputFiles.contains(artifact.getExecPath());
}
private static boolean shouldCheckTreeArtifact(
@Nullable NavigableSet<PathFragment> knownModifiedOutputFiles, Artifact treeArtifact) {
if (knownModifiedOutputFiles == null) {
return true;
}
PathFragment artifactExecPath = treeArtifact.getExecPath();
PathFragment headPath = knownModifiedOutputFiles.ceiling(artifactExecPath);
return headPath != null && headPath.startsWith(artifactExecPath);
}
private ImmutableBatchDirtyResult getDirtyValues(
ValueFetcher fetcher,
Collection<SkyKey> keys,
SkyValueDirtinessChecker checker,
boolean checkMissingValues,
@Nullable InMemoryGraph inMemoryGraph)
throws InterruptedException {
ExecutorService executor =
Executors.newFixedThreadPool(
numThreads,
new ThreadFactoryBuilder().setNameFormat("FileSystem Value Invalidator %d").build());
final AtomicInteger numKeysChecked = new AtomicInteger(0);
MutableBatchDirtyResult batchResult = new MutableBatchDirtyResult(numKeysChecked);
ElapsedTimeReceiver elapsedTimeReceiver =
elapsedTimeNanos -> {
if (elapsedTimeNanos > 0) {
logger.atInfo().log(
"Spent %d nanoseconds checking %d filesystem nodes (%d scanned)",
elapsedTimeNanos, numKeysChecked.get(), keys.size());
}
};
try (AutoProfiler prof = AutoProfiler.create(elapsedTimeReceiver)) {
for (final SkyKey key : keys) {
if (!checker.applies(key)) {
continue;
}
Preconditions.checkState(
key.functionName().getHermeticity() == FunctionHermeticity.NONHERMETIC,
"Only non-hermetic keys can be dirty roots: %s",
key);
executor.execute(
() -> {
SkyValue value;
try {
value = fetcher.get(key);
} catch (InterruptedException e) {
return;
}
if (!checkMissingValues && value == null) {
return;
}
@Nullable
Version oldMtsv =
inMemoryGraph != null
? inMemoryGraph
.get( null, Reason.OTHER, key)
.getMaxTransitiveSourceVersion()
: null;
numKeysChecked.incrementAndGet();
DirtyResult result;
try {
result = checker.check(key, value, oldMtsv, syscallCache, tsgm);
} catch (IOException e) {
result = DirtyResult.dirty();
}
if (result.isDirty()) {
batchResult.add(
key, value, result.getNewValue(), result.getNewMaxTransitiveSourceVersion());
}
});
}
if (ExecutorUtil.interruptibleShutdown(executor)) {
throw new InterruptedException();
}
}
return batchResult.toImmutable();
}
public static class ImmutableBatchDirtyResult implements Differencer.DiffWithDelta {
private final Collection<SkyKey> dirtyKeysWithoutNewValues;
private final Map<SkyKey, Delta> dirtyKeysWithNewAndOldValues;
private final int numKeysChecked;
private ImmutableBatchDirtyResult(
Collection<SkyKey> dirtyKeysWithoutNewValues,
Map<SkyKey, Delta> dirtyKeysWithNewAndOldValues,
int numKeysChecked) {
this.dirtyKeysWithoutNewValues = dirtyKeysWithoutNewValues;
this.dirtyKeysWithNewAndOldValues = dirtyKeysWithNewAndOldValues;
this.numKeysChecked = numKeysChecked;
}
@Override
public Collection<SkyKey> changedKeysWithoutNewValues() {
return dirtyKeysWithoutNewValues;
}
@Override
public Map<SkyKey, Delta> changedKeysWithNewValues() {
return dirtyKeysWithNewAndOldValues;
}
public int getNumKeysChecked() {
return numKeysChecked;
}
}
private static class MutableBatchDirtyResult {
private final Set<SkyKey> concurrentDirtyKeysWithoutNewValues =
Collections.newSetFromMap(new ConcurrentHashMap<SkyKey, Boolean>());
private final ConcurrentHashMap<SkyKey, Delta> concurrentDirtyKeysWithNewAndOldValues =
new ConcurrentHashMap<>();
private final AtomicInteger numChecked;
private MutableBatchDirtyResult(AtomicInteger numChecked) {
this.numChecked = numChecked;
}
private void add(
SkyKey key,
@Nullable SkyValue oldValue,
@Nullable SkyValue newValue,
@Nullable Version newMaxTransitiveSourceVersion) {
if (newValue == null) {
concurrentDirtyKeysWithoutNewValues.add(key);
} else {
if (oldValue == null) {
concurrentDirtyKeysWithNewAndOldValues.put(
key, Delta.justNew(newValue, newMaxTransitiveSourceVersion));
} else {
concurrentDirtyKeysWithNewAndOldValues.put(
key, Delta.changed(oldValue, newValue, newMaxTransitiveSourceVersion));
}
}
}
private ImmutableBatchDirtyResult toImmutable() {
return new ImmutableBatchDirtyResult(
concurrentDirtyKeysWithoutNewValues,
concurrentDirtyKeysWithNewAndOldValues,
numChecked.get());
}
}
} | stats | java | bazel |
package org.apache.xml.dtm;
public abstract class DTMAxisTraverser
{
public int first(int context)
{
return next(context, context);
}
public int first(int context, int [MASK])
{
return next(context, context, [MASK]);
}
public abstract int next(int context, int current);
public abstract int next(int context, int current, int [MASK]);
} | extendedTypeID | java | j2objc |
package org.elasticsearch.discovery;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.TransportInfo;
import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING;
import static org.elasticsearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0)
public class SettingsBasedSeedHostsProviderIT extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings));
if (randomBoolean()) {
builder.putList(DISCOVERY_SEED_PROVIDERS_SETTING.getKey());
} else {
builder.remove(DISCOVERY_SEED_PROVIDERS_SETTING.getKey());
}
builder.remove(DISCOVERY_SEED_HOSTS_SETTING.getKey());
return builder.build();
}
public void testClusterFormsWithSingleSeedHostInSettings() {
final String [MASK] = internalCluster().startNode();
final NodesInfoResponse nodesInfoResponse = client([MASK]).admin()
.cluster()
.nodesInfo(new NodesInfoRequest("_local"))
.actionGet();
final String seedNodeAddress = nodesInfoResponse.getNodes()
.get(0)
.getInfo(TransportInfo.class)
.getAddress()
.publishAddress()
.toString();
logger.info("--> using seed node address {}", seedNodeAddress);
int extraNodes = randomIntBetween(1, 5);
internalCluster().startNodes(
extraNodes,
Settings.builder().putList(DISCOVERY_SEED_HOSTS_SETTING.getKey(), seedNodeAddress).build()
);
ensureStableCluster(extraNodes + 1);
}
} | seedNodeName | java | elasticsearch |
package org.springframework.docs.web.webmvc.mvcconfig.mvcconfigvalidation;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
@Controller
public class MyController {
@InitBinder
public void initBinder(WebDataBinder [MASK]) {
[MASK].addValidators(new FooValidator());
}
} | binder | java | spring-framework |
package com.google.devtools.build.lib.concurrent;
import java.util.concurrent.Callable;
public final class Uninterruptibles {
public static final <T> T callUninterruptibly(Callable<T> [MASK]) throws Exception {
boolean interrupted = false;
try {
while (true) {
try {
return [MASK].call();
} catch (InterruptedException e) {
interrupted = true;
}
}
} finally {
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
} | callable | java | bazel |
package org.springframework.boot.actuate.endpoint.annotation;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.springframework.aop.scope.ScopedProxyUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.boot.actuate.endpoint.Access;
import org.springframework.boot.actuate.endpoint.EndpointFilter;
import org.springframework.boot.actuate.endpoint.EndpointId;
import org.springframework.boot.actuate.endpoint.EndpointsSupplier;
import org.springframework.boot.actuate.endpoint.ExposableEndpoint;
import org.springframework.boot.actuate.endpoint.Operation;
import org.springframework.boot.actuate.endpoint.OperationFilter;
import org.springframework.boot.actuate.endpoint.invoke.OperationInvoker;
import org.springframework.boot.actuate.endpoint.invoke.OperationInvokerAdvisor;
import org.springframework.boot.actuate.endpoint.invoke.ParameterValueMapper;
import org.springframework.boot.util.LambdaSafe;
import org.springframework.context.ApplicationContext;
import org.springframework.core.ResolvableType;
import org.springframework.core.annotation.MergedAnnotation;
import org.springframework.core.annotation.MergedAnnotations;
import org.springframework.core.annotation.MergedAnnotations.SearchStrategy;
import org.springframework.core.env.Environment;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.CollectionUtils;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.util.StringUtils;
public abstract class EndpointDiscoverer<E extends ExposableEndpoint<O>, O extends Operation>
implements EndpointsSupplier<E> {
private final ApplicationContext applicationContext;
private final Collection<EndpointFilter<E>> endpointFilters;
private final Collection<OperationFilter<O>> operationFilters;
private final DiscoveredOperationsFactory<O> operationsFactory;
private final Map<EndpointBean, E> filterEndpoints = new ConcurrentHashMap<>();
private volatile Collection<E> endpoints;
@Deprecated(since = "3.4.0", forRemoval = true)
public EndpointDiscoverer(ApplicationContext applicationContext, ParameterValueMapper parameterValueMapper,
Collection<OperationInvokerAdvisor> invokerAdvisors, Collection<EndpointFilter<E>> endpointFilters) {
this(applicationContext, parameterValueMapper, invokerAdvisors, endpointFilters, Collections.emptyList());
}
public EndpointDiscoverer(ApplicationContext applicationContext, ParameterValueMapper parameterValueMapper,
Collection<OperationInvokerAdvisor> invokerAdvisors, Collection<EndpointFilter<E>> endpointFilters,
Collection<OperationFilter<O>> operationFilters) {
Assert.notNull(applicationContext, "'applicationContext' must not be null");
Assert.notNull(parameterValueMapper, "'parameterValueMapper' must not be null");
Assert.notNull(invokerAdvisors, "'invokerAdvisors' must not be null");
Assert.notNull(endpointFilters, "'endpointFilters' must not be null");
Assert.notNull(operationFilters, "'operationFilters' must not be null");
this.applicationContext = applicationContext;
this.endpointFilters = Collections.unmodifiableCollection(endpointFilters);
this.operationFilters = Collections.unmodifiableCollection(operationFilters);
this.operationsFactory = getOperationsFactory(parameterValueMapper, invokerAdvisors);
}
private DiscoveredOperationsFactory<O> getOperationsFactory(ParameterValueMapper parameterValueMapper,
Collection<OperationInvokerAdvisor> invokerAdvisors) {
return new DiscoveredOperationsFactory<>(parameterValueMapper, invokerAdvisors) {
@Override
Collection<O> createOperations(EndpointId id, Object target) {
return super.createOperations(id, target);
}
@Override
protected O createOperation(EndpointId endpointId, DiscoveredOperationMethod operationMethod,
OperationInvoker invoker) {
return EndpointDiscoverer.this.createOperation(endpointId, operationMethod, invoker);
}
};
}
@Override
public final Collection<E> getEndpoints() {
if (this.endpoints == null) {
this.endpoints = discoverEndpoints();
}
return this.endpoints;
}
private Collection<E> discoverEndpoints() {
Collection<EndpointBean> endpointBeans = createEndpointBeans();
addExtensionBeans(endpointBeans);
return convertToEndpoints(endpointBeans);
}
private Collection<EndpointBean> createEndpointBeans() {
Map<EndpointId, EndpointBean> byId = new LinkedHashMap<>();
String[] beanNames = BeanFactoryUtils.beanNamesForAnnotationIncludingAncestors(this.applicationContext,
Endpoint.class);
for (String beanName : beanNames) {
if (!ScopedProxyUtils.isScopedTarget(beanName)) {
EndpointBean endpointBean = createEndpointBean(beanName);
EndpointBean previous = byId.putIfAbsent(endpointBean.getId(), endpointBean);
Assert.state(previous == null, () -> "Found two endpoints with the id '" + endpointBean.getId() + "': '"
+ endpointBean.getBeanName() + "' and '" + previous.getBeanName() + "'");
}
}
return byId.values();
}
private EndpointBean createEndpointBean(String beanName) {
Class<?> beanType = ClassUtils.getUserClass(this.applicationContext.getType(beanName, false));
Supplier<Object> beanSupplier = () -> this.applicationContext.getBean(beanName);
return new EndpointBean(this.applicationContext.getEnvironment(), beanName, beanType, beanSupplier);
}
private void addExtensionBeans(Collection<EndpointBean> endpointBeans) {
Map<EndpointId, EndpointBean> byId = endpointBeans.stream()
.collect(Collectors.toMap(EndpointBean::getId, Function.identity()));
String[] beanNames = BeanFactoryUtils.beanNamesForAnnotationIncludingAncestors(this.applicationContext,
EndpointExtension.class);
for (String beanName : beanNames) {
ExtensionBean extensionBean = createExtensionBean(beanName);
EndpointBean endpointBean = byId.get(extensionBean.getEndpointId());
Assert.state(endpointBean != null, () -> ("Invalid extension '" + extensionBean.getBeanName()
+ "': no endpoint found with id '" + extensionBean.getEndpointId() + "'"));
addExtensionBean(endpointBean, extensionBean);
}
}
private ExtensionBean createExtensionBean(String beanName) {
Class<?> beanType = ClassUtils.getUserClass(this.applicationContext.getType(beanName));
Supplier<Object> beanSupplier = () -> this.applicationContext.getBean(beanName);
return new ExtensionBean(this.applicationContext.getEnvironment(), beanName, beanType, beanSupplier);
}
private void addExtensionBean(EndpointBean endpointBean, ExtensionBean extensionBean) {
if (isExtensionExposed(endpointBean, extensionBean)) {
Assert.state(isEndpointExposed(endpointBean) || isEndpointFiltered(endpointBean),
() -> "Endpoint bean '" + endpointBean.getBeanName() + "' cannot support the extension bean '"
+ extensionBean.getBeanName() + "'");
endpointBean.addExtension(extensionBean);
}
}
private Collection<E> convertToEndpoints(Collection<EndpointBean> endpointBeans) {
Set<E> endpoints = new LinkedHashSet<>();
for (EndpointBean endpointBean : endpointBeans) {
if (isEndpointExposed(endpointBean)) {
E endpoint = convertToEndpoint(endpointBean);
if (isInvocable(endpoint)) {
endpoints.add(endpoint);
}
}
}
return Collections.unmodifiableSet(endpoints);
}
protected boolean isInvocable(E endpoint) {
return !endpoint.getOperations().isEmpty();
}
private E convertToEndpoint(EndpointBean endpointBean) {
MultiValueMap<OperationKey, O> indexed = new LinkedMultiValueMap<>();
EndpointId id = endpointBean.getId();
addOperations(indexed, id, endpointBean.getDefaultAccess(), endpointBean.getBean(), false);
if (endpointBean.getExtensions().size() > 1) {
String extensionBeans = endpointBean.getExtensions()
.stream()
.map(ExtensionBean::getBeanName)
.collect(Collectors.joining(", "));
throw new IllegalStateException("Found multiple extensions for the endpoint bean "
+ endpointBean.getBeanName() + " (" + extensionBeans + ")");
}
for (ExtensionBean extensionBean : endpointBean.getExtensions()) {
addOperations(indexed, id, endpointBean.getDefaultAccess(), extensionBean.getBean(), true);
}
assertNoDuplicateOperations(endpointBean, indexed);
List<O> operations = indexed.values().stream().map(this::getLast).filter(Objects::nonNull).toList();
return createEndpoint(endpointBean.getBean(), id, endpointBean.getDefaultAccess(), operations);
}
private void addOperations(MultiValueMap<OperationKey, O> indexed, EndpointId id, Access defaultAccess,
Object target, boolean replaceLast) {
Set<OperationKey> replacedLast = new HashSet<>();
Collection<O> operations = this.operationsFactory.createOperations(id, target);
for (O operation : operations) {
if (!isOperationFiltered(operation, id, defaultAccess)) {
OperationKey key = createOperationKey(operation);
O last = getLast(indexed.get(key));
if (replaceLast && replacedLast.add(key) && last != null) {
indexed.get(key).remove(last);
}
indexed.add(key, operation);
}
}
}
private <T> T getLast(List<T> list) {
return CollectionUtils.isEmpty(list) ? null : list.get(list.size() - 1);
}
private void assertNoDuplicateOperations(EndpointBean endpointBean, MultiValueMap<OperationKey, O> indexed) {
List<OperationKey> duplicates = indexed.entrySet()
.stream()
.filter((entry) -> entry.getValue().size() > 1)
.map(Map.Entry::getKey)
.toList();
if (!duplicates.isEmpty()) {
Set<ExtensionBean> extensions = endpointBean.getExtensions();
String extensionBeanNames = extensions.stream()
.map(ExtensionBean::getBeanName)
.collect(Collectors.joining(", "));
throw new IllegalStateException("Unable to map duplicate endpoint operations: " + duplicates + " to "
+ endpointBean.getBeanName() + (extensions.isEmpty() ? "" : " (" + extensionBeanNames + ")"));
}
}
private boolean isExtensionExposed(EndpointBean endpointBean, ExtensionBean extensionBean) {
return isFilterMatch(extensionBean.getFilter(), endpointBean)
&& isExtensionTypeExposed(extensionBean.getBeanType());
}
protected boolean isExtensionTypeExposed(Class<?> extensionBeanType) {
return true;
}
private boolean isEndpointExposed(EndpointBean endpointBean) {
return isFilterMatch(endpointBean.getFilter(), endpointBean) && !isEndpointFiltered(endpointBean)
&& isEndpointTypeExposed(endpointBean.getBeanType());
}
protected boolean isEndpointTypeExposed(Class<?> beanType) {
return true;
}
private boolean isEndpointFiltered(EndpointBean endpointBean) {
for (EndpointFilter<E> filter : this.endpointFilters) {
if (!isFilterMatch(filter, endpointBean)) {
return true;
}
}
return false;
}
@SuppressWarnings("unchecked")
private boolean isFilterMatch(Class<?> filter, EndpointBean endpointBean) {
if (!isEndpointTypeExposed(endpointBean.getBeanType())) {
return false;
}
if (filter == null) {
return true;
}
E endpoint = getFilterEndpoint(endpointBean);
Class<?> generic = ResolvableType.forClass(EndpointFilter.class, filter).resolveGeneric(0);
if (generic == null || generic.isInstance(endpoint)) {
EndpointFilter<E> instance = (EndpointFilter<E>) BeanUtils.instantiateClass(filter);
return isFilterMatch(instance, endpoint);
}
return false;
}
private boolean isFilterMatch(EndpointFilter<E> filter, EndpointBean endpointBean) {
return isFilterMatch(filter, getFilterEndpoint(endpointBean));
}
@SuppressWarnings("unchecked")
private boolean isFilterMatch(EndpointFilter<E> filter, E endpoint) {
return LambdaSafe.callback(EndpointFilter.class, filter, endpoint)
.withLogger(EndpointDiscoverer.class)
.invokeAnd((f) -> f.match(endpoint))
.get();
}
private boolean isOperationFiltered(Operation operation, EndpointId endpointId, Access defaultAccess) {
for (OperationFilter<O> filter : this.operationFilters) {
if (!isFilterMatch(filter, operation, endpointId, defaultAccess)) {
return true;
}
}
return false;
}
@SuppressWarnings("unchecked")
private boolean isFilterMatch(OperationFilter<O> filter, Operation operation, EndpointId endpointId,
Access defaultAccess) {
return LambdaSafe.callback(OperationFilter.class, filter, operation)
.withLogger(EndpointDiscoverer.class)
.invokeAnd((f) -> f.match(operation, endpointId, defaultAccess))
.get();
}
private E getFilterEndpoint(EndpointBean endpointBean) {
return this.filterEndpoints.computeIfAbsent(endpointBean, (key) -> createEndpoint(endpointBean.getBean(),
endpointBean.getId(), endpointBean.getDefaultAccess(), Collections.emptySet()));
}
@SuppressWarnings("unchecked")
protected Class<? extends E> getEndpointType() {
return (Class<? extends E>) ResolvableType.forClass(EndpointDiscoverer.class, getClass()).resolveGeneric(0);
}
@Deprecated(since = "3.4.0", forRemoval = true)
protected E createEndpoint(Object endpointBean, EndpointId id, boolean enabledByDefault, Collection<O> operations) {
return createEndpoint(endpointBean, id, (enabledByDefault) ? Access.UNRESTRICTED : Access.NONE, operations);
}
protected abstract E createEndpoint(Object endpointBean, EndpointId id, Access defaultAccess,
Collection<O> operations);
protected abstract O createOperation(EndpointId endpointId, DiscoveredOperationMethod operationMethod,
OperationInvoker invoker);
protected abstract OperationKey createOperationKey(O operation);
protected static final class OperationKey {
private final Object key;
private final Supplier<String> description;
public OperationKey(Object key, Supplier<String> description) {
Assert.notNull(key, "'key' must not be null");
Assert.notNull(description, "'description' must not be null");
this.key = key;
this.description = description;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
return this.key.equals(((OperationKey) obj).key);
}
@Override
public int hashCode() {
return this.key.hashCode();
}
@Override
public String toString() {
return this.description.get();
}
}
private static class EndpointBean {
private final String beanName;
private final Class<?> beanType;
private final Supplier<Object> beanSupplier;
private final EndpointId id;
private final Access defaultAccess;
private final Class<?> filter;
private final Set<ExtensionBean> extensions = new LinkedHashSet<>();
EndpointBean(Environment [MASK], String beanName, Class<?> beanType, Supplier<Object> beanSupplier) {
MergedAnnotation<Endpoint> annotation = MergedAnnotations.from(beanType, SearchStrategy.TYPE_HIERARCHY)
.get(Endpoint.class);
String id = annotation.getString("id");
Assert.state(StringUtils.hasText(id),
() -> "No @Endpoint id attribute specified for " + beanType.getName());
this.beanName = beanName;
this.beanType = beanType;
this.beanSupplier = beanSupplier;
this.id = EndpointId.of([MASK], id);
boolean enabledByDefault = annotation.getBoolean("enableByDefault");
this.defaultAccess = enabledByDefault ? annotation.getEnum("defaultAccess", Access.class) : Access.NONE;
this.filter = getFilter(beanType);
}
void addExtension(ExtensionBean extensionBean) {
this.extensions.add(extensionBean);
}
Set<ExtensionBean> getExtensions() {
return this.extensions;
}
private Class<?> getFilter(Class<?> type) {
return MergedAnnotations.from(type, SearchStrategy.TYPE_HIERARCHY)
.get(FilteredEndpoint.class)
.getValue(MergedAnnotation.VALUE, Class.class)
.orElse(null);
}
String getBeanName() {
return this.beanName;
}
Class<?> getBeanType() {
return this.beanType;
}
Object getBean() {
return this.beanSupplier.get();
}
EndpointId getId() {
return this.id;
}
Access getDefaultAccess() {
return this.defaultAccess;
}
Class<?> getFilter() {
return this.filter;
}
}
private static class ExtensionBean {
private final String beanName;
private final Class<?> beanType;
private final Supplier<Object> beanSupplier;
private final EndpointId endpointId;
private final Class<?> filter;
ExtensionBean(Environment [MASK], String beanName, Class<?> beanType, Supplier<Object> beanSupplier) {
this.beanName = beanName;
this.beanType = beanType;
this.beanSupplier = beanSupplier;
MergedAnnotation<EndpointExtension> extensionAnnotation = MergedAnnotations
.from(beanType, SearchStrategy.TYPE_HIERARCHY)
.get(EndpointExtension.class);
Class<?> endpointType = extensionAnnotation.getClass("endpoint");
MergedAnnotation<Endpoint> endpointAnnotation = MergedAnnotations
.from(endpointType, SearchStrategy.TYPE_HIERARCHY)
.get(Endpoint.class);
Assert.state(endpointAnnotation.isPresent(),
() -> "Extension " + endpointType.getName() + " does not specify an endpoint");
this.endpointId = EndpointId.of([MASK], endpointAnnotation.getString("id"));
this.filter = extensionAnnotation.getClass("filter");
}
String getBeanName() {
return this.beanName;
}
Class<?> getBeanType() {
return this.beanType;
}
Object getBean() {
return this.beanSupplier.get();
}
EndpointId getEndpointId() {
return this.endpointId;
}
Class<?> getFilter() {
return this.filter;
}
}
} | environment | java | spring-boot |
package org.springframework.boot.gradle.dsl;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
import org.gradle.testkit.runner.TaskOutcome;
import org.junit.jupiter.api.TestTemplate;
import org.springframework.boot.gradle.junit.GradleCompatibility;
import org.springframework.boot.gradle.tasks.buildinfo.BuildInfo;
import org.springframework.boot.testsupport.gradle.testkit.GradleBuild;
import static org.assertj.core.api.Assertions.assertThat;
@GradleCompatibility
class BuildInfoDslIntegrationTests {
GradleBuild [MASK];
@TestTemplate
void basicJar() {
assertThat(this.[MASK].build("bootBuildInfo", "--stacktrace").task(":bootBuildInfo").getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
Properties properties = buildInfoProperties();
assertThat(properties).containsEntry("build.name", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.artifact", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.group", "com.example");
assertThat(properties).containsEntry("build.version", "1.0");
}
@TestTemplate
void jarWithCustomName() {
assertThat(this.[MASK].build("bootBuildInfo", "--stacktrace").task(":bootBuildInfo").getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
Properties properties = buildInfoProperties();
assertThat(properties).containsEntry("build.name", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.artifact", "foo");
assertThat(properties).containsEntry("build.group", "com.example");
assertThat(properties).containsEntry("build.version", "1.0");
}
@TestTemplate
void basicWar() {
assertThat(this.[MASK].build("bootBuildInfo", "--stacktrace").task(":bootBuildInfo").getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
Properties properties = buildInfoProperties();
assertThat(properties).containsEntry("build.name", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.artifact", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.group", "com.example");
assertThat(properties).containsEntry("build.version", "1.0");
}
@TestTemplate
void warWithCustomName() {
assertThat(this.[MASK].build("bootBuildInfo", "--stacktrace").task(":bootBuildInfo").getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
Properties properties = buildInfoProperties();
assertThat(properties).containsEntry("build.name", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.artifact", "foo");
assertThat(properties).containsEntry("build.group", "com.example");
assertThat(properties).containsEntry("build.version", "1.0");
}
@TestTemplate
void additionalProperties() {
assertThat(this.[MASK].build("bootBuildInfo", "--stacktrace").task(":bootBuildInfo").getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
Properties properties = buildInfoProperties();
assertThat(properties).containsEntry("build.name", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.artifact", this.[MASK].getProjectDir().getName());
assertThat(properties).containsEntry("build.group", "com.example");
assertThat(properties).containsEntry("build.version", "1.0");
assertThat(properties).containsEntry("build.a", "alpha");
assertThat(properties).containsEntry("build.b", "bravo");
}
@TestTemplate
void classesDependency() {
assertThat(this.[MASK].build("classes", "--stacktrace").task(":bootBuildInfo").getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
}
private Properties buildInfoProperties() {
File file = new File(this.[MASK].getProjectDir(), "build/resources/main/META-INF/build-info.properties");
assertThat(file).isFile();
Properties properties = new Properties();
try (FileReader reader = new FileReader(file)) {
properties.load(reader);
return properties;
}
catch (IOException ex) {
throw new RuntimeException(ex);
}
}
} | gradleBuild | java | spring-boot |
package com.google.common.collect;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import junit.framework.TestCase;
import org.jspecify.annotations.NullUnmarked;
@NullUnmarked
public class ForwardingConcurrentMapTest extends TestCase {
private static class TestMap extends ForwardingConcurrentMap<String, Integer> {
final ConcurrentMap<String, Integer> [MASK] = new ConcurrentHashMap<>();
@Override
protected ConcurrentMap<String, Integer> [MASK]() {
return [MASK];
}
}
public void testPutIfAbsent() {
TestMap map = new TestMap();
map.put("foo", 1);
assertEquals(Integer.valueOf(1), map.putIfAbsent("foo", 2));
assertEquals(Integer.valueOf(1), map.get("foo"));
assertNull(map.putIfAbsent("bar", 3));
assertEquals(Integer.valueOf(3), map.get("bar"));
}
public void testRemove() {
TestMap map = new TestMap();
map.put("foo", 1);
assertFalse(map.remove("foo", 2));
assertFalse(map.remove("bar", 1));
assertEquals(Integer.valueOf(1), map.get("foo"));
assertTrue(map.remove("foo", 1));
assertTrue(map.isEmpty());
}
public void testReplace() {
TestMap map = new TestMap();
map.put("foo", 1);
assertEquals(Integer.valueOf(1), map.replace("foo", 2));
assertNull(map.replace("bar", 3));
assertEquals(Integer.valueOf(2), map.get("foo"));
assertFalse(map.containsKey("bar"));
}
public void testReplaceConditional() {
TestMap map = new TestMap();
map.put("foo", 1);
assertFalse(map.replace("foo", 2, 3));
assertFalse(map.replace("bar", 1, 2));
assertEquals(Integer.valueOf(1), map.get("foo"));
assertFalse(map.containsKey("bar"));
assertTrue(map.replace("foo", 1, 4));
assertEquals(Integer.valueOf(4), map.get("foo"));
}
} | delegate | java | guava |
package org.springframework.boot.test.context.assertj;
import java.util.function.Supplier;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
public interface AssertableApplicationContext
extends ApplicationContextAssertProvider<ConfigurableApplicationContext>, ConfigurableApplicationContext {
static AssertableApplicationContext get(Supplier<? extends ConfigurableApplicationContext> contextSupplier) {
return ApplicationContextAssertProvider.get(AssertableApplicationContext.class,
ConfigurableApplicationContext.class, contextSupplier);
}
static AssertableApplicationContext get(Supplier<? extends ConfigurableApplicationContext> contextSupplier,
Class<?>... [MASK]) {
return ApplicationContextAssertProvider.get(AssertableApplicationContext.class,
ConfigurableApplicationContext.class, contextSupplier, [MASK]);
}
} | additionalContextInterfaces | java | spring-boot |
package org.springframework.boot.build.toolchain;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.tasks.testing.Test;
import org.gradle.jvm.toolchain.JavaLanguageVersion;
import org.gradle.jvm.toolchain.JavaToolchainSpec;
public class ToolchainPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
configureToolchain(project);
}
private void configureToolchain(Project project) {
ToolchainExtension toolchain = project.getExtensions().create("toolchain", ToolchainExtension.class, project);
JavaLanguageVersion [MASK] = toolchain.getJavaVersion();
if ([MASK] != null) {
project.afterEvaluate((evaluated) -> configure(evaluated, toolchain));
}
}
private void configure(Project project, ToolchainExtension toolchain) {
if (!isJavaVersionSupported(toolchain, toolchain.getJavaVersion())) {
disableToolchainTasks(project);
}
else {
JavaToolchainSpec toolchainSpec = project.getExtensions()
.getByType(JavaPluginExtension.class)
.getToolchain();
toolchainSpec.getLanguageVersion().set(toolchain.getJavaVersion());
configureTestToolchain(project, toolchain);
}
}
private boolean isJavaVersionSupported(ToolchainExtension toolchain, JavaLanguageVersion [MASK]) {
return toolchain.getMaximumCompatibleJavaVersion()
.map((version) -> version.canCompileOrRun([MASK]))
.getOrElse(true);
}
private void disableToolchainTasks(Project project) {
project.getTasks().withType(Test.class, (task) -> task.setEnabled(false));
}
private void configureTestToolchain(Project project, ToolchainExtension toolchain) {
List<String> jvmArgs = new ArrayList<>(toolchain.getTestJvmArgs().getOrElse(Collections.emptyList()));
project.getTasks().withType(Test.class, (test) -> test.jvmArgs(jvmArgs));
}
} | toolchainVersion | java | spring-boot |
package hudson.tasks._maven;
import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import hudson.MarkupText;
import hudson.console.ConsoleAnnotationDescriptor;
import hudson.console.ConsoleAnnotator;
import hudson.console.ConsoleNote;
import java.util.regex.Pattern;
import org.jenkinsci.Symbol;
public class MavenMojoNote extends ConsoleNote {
public MavenMojoNote() {
}
@Override
public ConsoleAnnotator annotate(Object context, MarkupText [MASK], int charPos) {
[MASK].addMarkup(7, [MASK].length(), "<b class=maven-mojo>", "</b>");
return null;
}
@Extension @Symbol("mavenMojos")
public static final class DescriptorImpl extends ConsoleAnnotationDescriptor {
@NonNull
@Override
public String getDisplayName() {
return "Maven Mojos";
}
}
public static final Pattern PATTERN = Pattern.compile("\\[INFO\\] \\[[A-Za-z0-9-_]+:[A-Za-z0-9-_]+ \\{execution: [A-Za-z0-9-_]+\\}\\]");
} | text | java | jenkins |
package org.elasticsearch.ingest.common;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.equalTo;
public class FailProcessorTests extends ESTestCase {
public void test() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String [MASK] = randomAlphaOfLength(10);
Processor processor = new FailProcessor(randomAlphaOfLength(10), null, new TestTemplateService.MockTemplateScript.Factory([MASK]));
try {
processor.execute(ingestDocument);
fail("fail processor should throw an exception");
} catch (FailProcessorException e) {
assertThat(e.getMessage(), equalTo([MASK]));
}
}
} | message | java | elasticsearch |
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import java.lang.Override;
import java.lang.String;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BytesRefBlock;
import org.elasticsearch.compute.data.BytesRefVector;
import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.data.OrdinalBytesRefVector;
import org.elasticsearch.compute.data.Vector;
import org.elasticsearch.compute.operator.DriverContext;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.xpack.esql.core.tree.Source;
public final class ToStringFromGeoPointEvaluator extends AbstractConvertFunction.AbstractEvaluator {
private final EvalOperator.ExpressionEvaluator wkb;
public ToStringFromGeoPointEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
DriverContext driverContext) {
super(driverContext, source);
this.wkb = wkb;
}
@Override
public EvalOperator.ExpressionEvaluator next() {
return wkb;
}
@Override
public Block evalVector(Vector v) {
BytesRefVector vector = (BytesRefVector) v;
OrdinalBytesRefVector ordinals = vector.asOrdinals();
if (ordinals != null) {
return evalOrdinals(ordinals);
}
int positionCount = v.getPositionCount();
BytesRef scratchPad = new BytesRef();
if (vector.isConstant()) {
return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount);
}
try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
builder.appendBytesRef(evalValue(vector, p, scratchPad));
}
return builder.build();
}
}
private BytesRef evalValue(BytesRefVector [MASK], int index, BytesRef scratchPad) {
BytesRef value = [MASK].getBytesRef(index, scratchPad);
return ToString.fromGeoPoint(value);
}
@Override
public Block evalBlock(Block b) {
BytesRefBlock block = (BytesRefBlock) b;
int positionCount = block.getPositionCount();
try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef scratchPad = new BytesRef();
for (int p = 0; p < positionCount; p++) {
int valueCount = block.getValueCount(p);
int start = block.getFirstValueIndex(p);
int end = start + valueCount;
boolean positionOpened = false;
boolean valuesAppended = false;
for (int i = start; i < end; i++) {
BytesRef value = evalValue(block, i, scratchPad);
if (positionOpened == false && valueCount > 1) {
builder.beginPositionEntry();
positionOpened = true;
}
builder.appendBytesRef(value);
valuesAppended = true;
}
if (valuesAppended == false) {
builder.appendNull();
} else if (positionOpened) {
builder.endPositionEntry();
}
}
return builder.build();
}
}
private BytesRef evalValue(BytesRefBlock [MASK], int index, BytesRef scratchPad) {
BytesRef value = [MASK].getBytesRef(index, scratchPad);
return ToString.fromGeoPoint(value);
}
private Block evalOrdinals(OrdinalBytesRefVector v) {
int positionCount = v.getDictionaryVector().getPositionCount();
BytesRef scratchPad = new BytesRef();
try (BytesRefVector.Builder builder = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
builder.appendBytesRef(evalValue(v.getDictionaryVector(), p, scratchPad));
}
IntVector ordinals = v.getOrdinalsVector();
ordinals.incRef();
return new OrdinalBytesRefVector(ordinals, builder.build()).asBlock();
}
}
@Override
public String toString() {
return "ToStringFromGeoPointEvaluator[" + "wkb=" + wkb + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(wkb);
}
public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory wkb;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory wkb) {
this.source = source;
this.wkb = wkb;
}
@Override
public ToStringFromGeoPointEvaluator get(DriverContext context) {
return new ToStringFromGeoPointEvaluator(source, wkb.get(context), context);
}
@Override
public String toString() {
return "ToStringFromGeoPointEvaluator[" + "wkb=" + wkb + "]";
}
}
} | container | java | elasticsearch |
package java.security;
public class GuardedObject implements java.io.Serializable {
private static final long serialVersionUID = -5240450096227834308L;
private Object object;
private Guard guard;
public GuardedObject(Object object, Guard guard)
{
this.guard = guard;
this.object = object;
}
public Object getObject()
throws SecurityException
{
if (guard != null)
guard.checkGuard(object);
return object;
}
private void writeObject(java.io.ObjectOutputStream [MASK])
throws java.io.IOException
{
if (guard != null)
guard.checkGuard(object);
[MASK].defaultWriteObject();
}
} | oos | java | j2objc |
package android.icu.util;
public class ICUCloneNotSupportedException extends ICUException {
private static final long [MASK] = -4824446458488194964L;
public ICUCloneNotSupportedException() {
}
public ICUCloneNotSupportedException(String message) {
super(message);
}
public ICUCloneNotSupportedException(Throwable cause) {
super(cause);
}
public ICUCloneNotSupportedException(String message, Throwable cause) {
super(message, cause);
}
} | serialVersionUID | java | j2objc |
package org.elasticsearch.search.fetch;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.RefCounted;
import org.elasticsearch.core.SimpleRefCounted;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.internal.ShardSearchContextId;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.transport.LeakTracker;
import java.io.IOException;
public final class FetchSearchResult extends SearchPhaseResult {
private SearchHits hits;
private transient int counter;
private ProfileResult profileResult;
private final RefCounted refCounted = LeakTracker.wrap(new SimpleRefCounted());
public FetchSearchResult() {}
public FetchSearchResult(ShardSearchContextId id, SearchShardTarget [MASK]) {
this.contextId = id;
setSearchShardTarget([MASK]);
}
public FetchSearchResult(StreamInput in) throws IOException {
contextId = new ShardSearchContextId(in);
hits = SearchHits.readFrom(in, true);
profileResult = in.readOptionalWriteable(ProfileResult::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
assert hasReferences();
contextId.writeTo(out);
hits.writeTo(out);
out.writeOptionalWriteable(profileResult);
}
@Override
public FetchSearchResult fetchResult() {
return this;
}
public void shardResult(SearchHits hits, ProfileResult profileResult) {
assert assertNoSearchTarget(hits);
assert hasReferences();
var existing = this.hits;
if (existing != null) {
existing.decRef();
}
this.hits = hits;
assert this.profileResult == null;
this.profileResult = profileResult;
}
private static boolean assertNoSearchTarget(SearchHits hits) {
for (SearchHit hit : hits.getHits()) {
assert hit.getShard() == null : "expected null but got: " + hit.getShard();
}
return true;
}
public SearchHits hits() {
assert hasReferences();
return hits;
}
public FetchSearchResult initCounter() {
counter = 0;
return this;
}
public int counterGetAndIncrement() {
return counter++;
}
public ProfileResult profileResult() {
return profileResult;
}
@Override
public void incRef() {
refCounted.incRef();
}
@Override
public boolean tryIncRef() {
return refCounted.tryIncRef();
}
@Override
public boolean decRef() {
if (refCounted.decRef()) {
deallocate();
return true;
}
return false;
}
private void deallocate() {
if (hits != null) {
hits.decRef();
hits = null;
}
}
@Override
public boolean hasReferences() {
return refCounted.hasReferences();
}
} | shardTarget | java | elasticsearch |
package org.springframework.boot.loader.jar;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
class ZipInflaterInputStream extends InflaterInputStream {
private final boolean ownsInflator;
private int available;
private boolean extraBytesWritten;
ZipInflaterInputStream(InputStream inputStream, int size) {
this(inputStream, new Inflater(true), size, true);
}
ZipInflaterInputStream(InputStream inputStream, Inflater [MASK], int size) {
this(inputStream, [MASK], size, false);
}
private ZipInflaterInputStream(InputStream inputStream, Inflater [MASK], int size, boolean ownsInflator) {
super(inputStream, [MASK], getInflaterBufferSize(size));
this.ownsInflator = ownsInflator;
this.available = size;
}
@Override
public int available() throws IOException {
if (this.available < 0) {
return super.available();
}
return this.available;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
int result = super.read(b, off, len);
if (result != -1) {
this.available -= result;
}
return result;
}
@Override
public void close() throws IOException {
super.close();
if (this.ownsInflator) {
this.inf.end();
}
}
@Override
protected void fill() throws IOException {
try {
super.fill();
}
catch (EOFException ex) {
if (this.extraBytesWritten) {
throw ex;
}
this.len = 1;
this.buf[0] = 0x0;
this.extraBytesWritten = true;
this.inf.setInput(this.buf, 0, this.len);
}
}
private static int getInflaterBufferSize(long size) {
size += 2;
size = (size > 65536) ? 8192 : size;
size = (size <= 0) ? 4096 : size;
return (int) size;
}
} | inflater | java | spring-boot |
package org.springframework.boot.buildpack.platform.io;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
class ZipFileTarArchiveTests {
@TempDir
File tempDir;
@Test
void createWhenZipIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> new ZipFileTarArchive(null, Owner.ROOT))
.withMessage("'zip' must not be null");
}
@Test
void createWhenOwnerIsNullThrowsException() throws Exception {
File file = new File(this.tempDir, "test.zip");
writeTestZip(file);
assertThatIllegalArgumentException().isThrownBy(() -> new ZipFileTarArchive(file, null))
.withMessage("'[MASK]' must not be null");
}
@Test
void writeToAdaptsContent() throws Exception {
Owner [MASK] = Owner.of(123, 456);
File file = new File(this.tempDir, "test.zip");
writeTestZip(file);
TarArchive tarArchive = TarArchive.fromZip(file, [MASK]);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
tarArchive.writeTo(outputStream);
try (TarArchiveInputStream tarStream = new TarArchiveInputStream(
new ByteArrayInputStream(outputStream.toByteArray()))) {
TarArchiveEntry dirEntry = tarStream.getNextEntry();
assertThat(dirEntry.getName()).isEqualTo("spring/");
assertThat(dirEntry.getLongUserId()).isEqualTo(123);
assertThat(dirEntry.getLongGroupId()).isEqualTo(456);
TarArchiveEntry fileEntry = tarStream.getNextEntry();
assertThat(fileEntry.getName()).isEqualTo("spring/boot");
assertThat(fileEntry.getLongUserId()).isEqualTo(123);
assertThat(fileEntry.getLongGroupId()).isEqualTo(456);
assertThat(fileEntry.getSize()).isEqualTo(4);
assertThat(fileEntry.getMode()).isEqualTo(0755);
assertThat(tarStream).hasContent("test");
}
}
private void writeTestZip(File file) throws IOException {
try (ZipArchiveOutputStream zip = new ZipArchiveOutputStream(file)) {
ZipArchiveEntry dirEntry = new ZipArchiveEntry("spring/");
zip.putArchiveEntry(dirEntry);
zip.closeArchiveEntry();
ZipArchiveEntry fileEntry = new ZipArchiveEntry("spring/boot");
fileEntry.setUnixMode(0755);
zip.putArchiveEntry(fileEntry);
zip.write("test".getBytes(StandardCharsets.UTF_8));
zip.closeArchiveEntry();
}
}
} | owner | java | spring-boot |
package org.springframework.boot.gradle.tasks.bundling;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Calendar;
import java.util.TimeZone;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
class DefaultTimeZoneOffsetTests {
@Test
void removeFromWithLongInDifferentTimeZonesReturnsSameValue() {
long time = OffsetDateTime.of(2000, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
TimeZone timeZone1 = TimeZone.getTimeZone("GMT");
TimeZone timeZone2 = TimeZone.getTimeZone("GMT+8");
TimeZone timeZone3 = TimeZone.getTimeZone("GMT-8");
long result1 = new DefaultTimeZoneOffset(timeZone1).removeFrom(time);
long result2 = new DefaultTimeZoneOffset(timeZone2).removeFrom(time);
long [MASK] = new DefaultTimeZoneOffset(timeZone3).removeFrom(time);
long dosTime1 = toDosTime(Calendar.getInstance(timeZone1), result1);
long dosTime2 = toDosTime(Calendar.getInstance(timeZone2), result2);
long dosTime3 = toDosTime(Calendar.getInstance(timeZone3), [MASK]);
assertThat(dosTime1).isEqualTo(dosTime2).isEqualTo(dosTime3);
}
@Test
void removeFromWithFileTimeReturnsFileTime() {
long time = OffsetDateTime.of(2000, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
long result = new DefaultTimeZoneOffset(TimeZone.getTimeZone("GMT+8")).removeFrom(time);
assertThat(result).isNotEqualTo(time).isEqualTo(946656000000L);
}
private long toDosTime(Calendar calendar, long time) {
calendar.setTimeInMillis(time);
final int year = calendar.get(Calendar.YEAR);
final int month = calendar.get(Calendar.MONTH) + 1;
return ((year - 1980) << 25) | (month << 21) | (calendar.get(Calendar.DAY_OF_MONTH) << 16)
| (calendar.get(Calendar.HOUR_OF_DAY) << 11) | (calendar.get(Calendar.MINUTE) << 5)
| (calendar.get(Calendar.SECOND) >> 1);
}
} | result3 | java | spring-boot |
package org.elasticsearch.painless.action;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.painless.lookup.PainlessClassBinding;
import org.elasticsearch.painless.lookup.PainlessInstanceBinding;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class PainlessContextInfo implements Writeable, ToXContentObject {
public static final ParseField NAME = new ParseField("name");
public static final ParseField CLASSES = new ParseField("classes");
public static final ParseField IMPORTED_METHODS = new ParseField("imported_methods");
public static final ParseField CLASS_BINDINGS = new ParseField("class_bindings");
public static final ParseField INSTANCE_BINDINGS = new ParseField("instance_bindings");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<PainlessContextInfo, Void> PARSER = new ConstructingObjectParser<>(
PainlessContextInfo.class.getCanonicalName(),
(v) -> new PainlessContextInfo(
(String) v[0],
(List<PainlessContextClassInfo>) v[1],
(List<PainlessContextMethodInfo>) v[2],
(List<PainlessContextClassBindingInfo>) v[3],
(List<PainlessContextInstanceBindingInfo>) v[4]
)
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> PainlessContextClassInfo.fromXContent(p), CLASSES);
PARSER.declareObjectArray(
ConstructingObjectParser.constructorArg(),
(p, c) -> PainlessContextMethodInfo.fromXContent(p),
IMPORTED_METHODS
);
PARSER.declareObjectArray(
ConstructingObjectParser.constructorArg(),
(p, c) -> PainlessContextClassBindingInfo.fromXContent(p),
CLASS_BINDINGS
);
PARSER.declareObjectArray(
ConstructingObjectParser.constructorArg(),
(p, c) -> PainlessContextInstanceBindingInfo.fromXContent(p),
INSTANCE_BINDINGS
);
}
private final String name;
private final List<PainlessContextClassInfo> classes;
private final List<PainlessContextMethodInfo> importedMethods;
private final List<PainlessContextClassBindingInfo> [MASK];
private final List<PainlessContextInstanceBindingInfo> instanceBindings;
public PainlessContextInfo(ScriptContext<?> scriptContext, PainlessLookup painlessLookup) {
this(
scriptContext.name,
painlessLookup.getClasses()
.stream()
.map(
javaClass -> new PainlessContextClassInfo(
javaClass,
javaClass == painlessLookup.canonicalTypeNameToType(
javaClass.getName().substring(javaClass.getName().lastIndexOf('.') + 1).replace('$', '.')
),
painlessLookup.lookupPainlessClass(javaClass)
)
)
.collect(Collectors.toList()),
painlessLookup.getImportedPainlessMethodsKeys().stream().map(importedPainlessMethodKey -> {
String[] split = importedPainlessMethodKey.split("/");
String importedPainlessMethodName = split[0];
int importedPainlessMethodArity = Integer.parseInt(split[1]);
PainlessMethod importedPainlessMethod = painlessLookup.lookupImportedPainlessMethod(
importedPainlessMethodName,
importedPainlessMethodArity
);
return new PainlessContextMethodInfo(importedPainlessMethod);
}).collect(Collectors.toList()),
painlessLookup.getPainlessClassBindingsKeys().stream().map(painlessClassBindingKey -> {
String[] split = painlessClassBindingKey.split("/");
String painlessClassBindingName = split[0];
int painlessClassBindingArity = Integer.parseInt(split[1]);
PainlessClassBinding painlessClassBinding = painlessLookup.lookupPainlessClassBinding(
painlessClassBindingName,
painlessClassBindingArity
);
return new PainlessContextClassBindingInfo(painlessClassBinding);
}).collect(Collectors.toList()),
painlessLookup.getPainlessInstanceBindingsKeys().stream().map(painlessInstanceBindingKey -> {
String[] split = painlessInstanceBindingKey.split("/");
String painlessInstanceBindingName = split[0];
int painlessInstanceBindingArity = Integer.parseInt(split[1]);
PainlessInstanceBinding painlessInstanceBinding = painlessLookup.lookupPainlessInstanceBinding(
painlessInstanceBindingName,
painlessInstanceBindingArity
);
return new PainlessContextInstanceBindingInfo(painlessInstanceBinding);
}).collect(Collectors.toList())
);
}
public PainlessContextInfo(
String name,
List<PainlessContextClassInfo> classes,
List<PainlessContextMethodInfo> importedMethods,
List<PainlessContextClassBindingInfo> [MASK],
List<PainlessContextInstanceBindingInfo> instanceBindings
) {
this.name = Objects.requireNonNull(name);
classes = new ArrayList<>(Objects.requireNonNull(classes));
classes.sort(Comparator.comparing(PainlessContextClassInfo::getSortValue));
this.classes = Collections.unmodifiableList(classes);
importedMethods = new ArrayList<>(Objects.requireNonNull(importedMethods));
importedMethods.sort(Comparator.comparing(PainlessContextMethodInfo::getSortValue));
this.importedMethods = Collections.unmodifiableList(importedMethods);
[MASK] = new ArrayList<>(Objects.requireNonNull([MASK]));
[MASK].sort(Comparator.comparing(PainlessContextClassBindingInfo::getSortValue));
this.[MASK] = Collections.unmodifiableList([MASK]);
instanceBindings = new ArrayList<>(Objects.requireNonNull(instanceBindings));
instanceBindings.sort(Comparator.comparing(PainlessContextInstanceBindingInfo::getSortValue));
this.instanceBindings = Collections.unmodifiableList(instanceBindings);
}
public PainlessContextInfo(StreamInput in) throws IOException {
name = in.readString();
classes = in.readCollectionAsImmutableList(PainlessContextClassInfo::new);
importedMethods = in.readCollectionAsImmutableList(PainlessContextMethodInfo::new);
[MASK] = in.readCollectionAsImmutableList(PainlessContextClassBindingInfo::new);
instanceBindings = in.readCollectionAsImmutableList(PainlessContextInstanceBindingInfo::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeCollection(classes);
out.writeCollection(importedMethods);
out.writeCollection([MASK]);
out.writeCollection(instanceBindings);
}
public static PainlessContextInfo fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(NAME.getPreferredName(), name);
builder.field(CLASSES.getPreferredName(), classes);
builder.field(IMPORTED_METHODS.getPreferredName(), importedMethods);
builder.field(CLASS_BINDINGS.getPreferredName(), [MASK]);
builder.field(INSTANCE_BINDINGS.getPreferredName(), instanceBindings);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PainlessContextInfo that = (PainlessContextInfo) o;
return Objects.equals(name, that.name)
&& Objects.equals(classes, that.classes)
&& Objects.equals(importedMethods, that.importedMethods)
&& Objects.equals([MASK], that.[MASK])
&& Objects.equals(instanceBindings, that.instanceBindings);
}
@Override
public int hashCode() {
return Objects.hash(name, classes, importedMethods, [MASK], instanceBindings);
}
@Override
public String toString() {
return "PainlessContextInfo{"
+ "name='"
+ name
+ '\''
+ ", classes="
+ classes
+ ", importedMethods="
+ importedMethods
+ ", [MASK]="
+ [MASK]
+ ", instanceBindings="
+ instanceBindings
+ '}';
}
public String getName() {
return name;
}
public List<PainlessContextClassInfo> getClasses() {
return classes;
}
public List<PainlessContextMethodInfo> getImportedMethods() {
return importedMethods;
}
public List<PainlessContextClassBindingInfo> getClassBindings() {
return [MASK];
}
public List<PainlessContextInstanceBindingInfo> getInstanceBindings() {
return instanceBindings;
}
} | classBindings | java | elasticsearch |
package proguard.util;
import java.util.List;
public class ListParser implements StringParser
{
private final StringParser stringParser;
public ListParser(StringParser stringParser)
{
this.stringParser = stringParser;
}
public StringMatcher parse(String regularExpression)
{
return parse(ListUtil.commaSeparatedList(regularExpression));
}
public StringMatcher parse(List regularExpressions)
{
StringMatcher listMatcher = null;
for (int index = regularExpressions.size()-1; index >= 0; index--)
{
String regularExpression = (String)regularExpressions.get(index);
StringMatcher entryMatcher = parseEntry(regularExpression);
listMatcher =
listMatcher == null ?
(StringMatcher)entryMatcher :
isNegated(regularExpression) ?
(StringMatcher)new AndMatcher(entryMatcher, listMatcher) :
(StringMatcher)new OrMatcher(entryMatcher, listMatcher);
}
return listMatcher != null ? listMatcher : new ConstantMatcher(true);
}
private StringMatcher parseEntry(String regularExpression)
{
return isNegated(regularExpression) ?
new NotMatcher(stringParser.parse(regularExpression.substring(1))) :
stringParser.parse(regularExpression);
}
private boolean isNegated(String regularExpression)
{
return regularExpression.length() > 0 &&
regularExpression.charAt(0) == '!';
}
public static void main(String[] args)
{
try
{
System.out.println("Regular expression ["+args[0]+"]");
ListParser [MASK] = new ListParser(new NameParser());
StringMatcher matcher = [MASK].parse(args[0]);
for (int index = 1; index < args.length; index++)
{
String string = args[index];
System.out.print("String ["+string+"]");
System.out.println(" -> match = "+matcher.matches(args[index]));
}
}
catch (Exception ex)
{
ex.printStackTrace();
}
}
} | parser | java | bazel |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.