doc_id
stringlengths
36
36
metadata
dict
input
stringlengths
28
4.3k
output
stringlengths
11
5.45k
0737a881-a2fb-4ea2-bd52-187a8637e6f5
{ "language": "Arduino" }
```arduino ``` Add arduino serial test sketch
```arduino void setup() { Serial.begin(9600); Serial.println("Application Started"); } void loop() { Serial.println("Hello From The Other Side!"); delay(1000); } ```
e09c9f4c-21c5-402d-ac52-0cfa3242ccdf
{ "language": "Arduino" }
```arduino ``` Add string equivalent example for teensy
```arduino /* Prototypical arduino/teensy code. This one sends things as chars, which allows it to be a bit more flexible, at the cost of efficiency. For example, sending the timestamp + two analog channels takes 8 bytes in the more efficient code, but ~15 bytes in this code. Additionally, you would need to parse the string on the other side. The plus is that the output is human-readable. */ #include "ADC.h" #include "IntervalTimer.h" // only the lines below needs to change // first line does which analog channels to read, // second line sets the sampling interval (in microseconds) const unsigned int channel_array[2] = {A0, A7}; const unsigned long period_0 = 100000; const unsigned int array_size = sizeof(channel_array) / sizeof(int); unsigned int value_array[array_size]; unsigned int ii = 0; volatile bool go_flag = false; bool go_flag_copy = false; elapsedMicros current_time; unsigned long current_time_copy = 0; IntervalTimer timer_0; ADC *adc = new ADC(); void setup() { for(ii = 0; ii < array_size; ii++) { pinMode(channel_array[ii], INPUT); } Serial.begin(9600); delay(1000); adc->setReference(ADC_REF_3V3, ADC_0); adc->setAveraging(8); adc->setResolution(12); adc->setConversionSpeed(ADC_HIGH_SPEED); adc->setSamplingSpeed(ADC_HIGH_SPEED); timer_0.priority(10); timer_0.begin(timerCallback, period_0); delay(500); } FASTRUN void timerCallback(void) { go_flag = true; } void loop() { while(!go_flag_copy) { noInterrupts(); go_flag_copy = go_flag; interrupts(); } go_flag_copy = false; go_flag = false; current_time_copy = current_time; for (ii = 0; ii < array_size; ii++) { value_array[ii] = adc->analogRead(channel_array[ii]); } Serial.print(current_time_copy); Serial.print(" "); for (ii = 0; ii < array_size; ii++) { Serial.print(value_array[ii]); Serial.print(" "); } Serial.print("\n"); } ```
b99b6046-a590-4062-86e5-dbbca3b798a9
{ "language": "Arduino" }
```arduino ``` Add demo which uses the Wire library
```arduino // This example demonstrates how to use the HIH61xx class with the Wire library. The HIH61xx state machine // enables others tasks to run whilst the HIH61xx is powering up etc. #include <Wire.h> #include <HIH61xx.h> #include <AsyncDelay.h> // The "hih" object must be created with a reference to the "Wire" object which represents the I2C bus it is using. // Note that the class for the Wire object is called "TwoWire", and must be included in the templated class name. HIH61xx<TwoWire> hih(Wire); AsyncDelay samplingInterval; void setup(void) { #if F_CPU >= 12000000UL Serial.begin(115200); #else Serial.begin(9600); #endif Wire.begin(); hih.initialise(); samplingInterval.start(3000, AsyncDelay::MILLIS); } bool printed = true; void loop(void) { if (samplingInterval.isExpired() && !hih.isSampling()) { hih.start(); printed = false; samplingInterval.repeat(); Serial.println("Sampling started (using Wire library)"); } hih.process(); if (hih.isFinished() && !printed) { printed = true; // Print saved values Serial.print("RH: "); Serial.print(hih.getRelHumidity() / 100.0); Serial.println(" %"); Serial.print("Ambient: "); Serial.print(hih.getAmbientTemp() / 100.0); Serial.println(" deg C"); Serial.print("Status: "); Serial.println(hih.getStatus()); } } ```
0a77ff0c-a01a-4113-9513-9208cb980389
{ "language": "Arduino" }
```arduino ``` Add draft of Arduino Read SD/Write Lights loop
```arduino #include <SPI.h> #include <SdFat.h> #include <FAB_LED.h> apa106<D, 6> LEDstrip; rgb frame[200]; // Test with reduced SPI speed for breadboards. // Change spiSpeed to SPI_FULL_SPEED for better performance // Use SPI_QUARTER_SPEED for even slower SPI bus speed const uint8_t spiSpeed = SPI_FULL_SPEED; //------------------------------------------------------------------------------ // File system object. SdFat sd; // Serial streams ArduinoOutStream cout(Serial); // SD card chip select const int chipSelect = 4; void setup() { Serial.begin(9600); // Wait for USB Serial while (!Serial) { SysCall::yield(); } cout << F("\nInitializing SD.\n"); if (!sd.begin(chipSelect, spiSpeed)) { if (sd.card()->errorCode()) { cout << F("SD initialization failed.\n"); cout << F("errorCode: ") << hex << showbase; cout << int(sd.card()->errorCode()); cout << F(", errorData: ") << int(sd.card()->errorData()); cout << dec << noshowbase << endl; return; } cout << F("\nCard successfully initialized.\n"); if (sd.vol()->fatType() == 0) { cout << F("Can't find a valid FAT16/FAT32 partition.\n"); return; } if (!sd.vwd()->isOpen()) { cout << F("Can't open root directory.\n"); return; } cout << F("Can't determine error type\n"); return; } cout << F("\nCard successfully initialized.\n"); cout << endl; if (!sd.exists("FRACTAL1.DAT")) { cout << F("FRACTAL1.DAT file not found.\n"); return; } File infile = sd.open("FRACTAL1.DAT"); if (!infile.isOpen()) { cout << F("Failed to open FRACTAL1.DAT\n"); return; } int bytes_read = infile.read(frame, sizeof(frame)); unsigned long prev_millis = millis(); cout << F("\nFrame size in bytes: ") << sizeof(frame); cout << F("\nStarting millis: ") << prev_millis; int i = 0; while (bytes_read == sizeof(frame)) { ++i; while (millis() - prev_millis < 50UL) { // busy loop until its time to paint the lights } prev_millis += 50UL; LEDstrip.sendPixels(sizeof(frame) / sizeof(*frame), frame); bytes_read = infile.read(frame, sizeof(frame)); } cout << F("\nFinal millis: 
") << prev_millis; cout << F("\nNum frames: ") << i; } void loop() { } ```
6da43531-f46b-4051-b8e0-47fb26435122
{ "language": "Arduino" }
```arduino ``` Add example of outputting an analog read value
```arduino /* AnalogReadEasyctrl Reads an analog input on pin 0. Attach the center pin of a potentiometer to pin A0, and the outside pins to +5V and ground. This example code is based on the Arduino example AnalogReadSerial */ #include "easyctrl.h" Monitored<int> sensorValue("sensorValue"); // the setup routine runs once when you press reset: void setup() { // initialize serial communication at 115200 bits per second: Serial.begin(115200); Easyctrl.begin("AnalogReadEasyctrl", Serial); } // the loop routine runs over and over again forever: void loop() { // read the input on analog pin 0: sensorValue = analogRead(A0); // Let easyctrl run Easyctrl.update(); delay(1); // delay in between reads for stability } ```
d530601e-3a96-4db6-bfd7-1ddfd13b55db
{ "language": "Arduino" }
```arduino ``` Add example code for Kelvinator A/C control.
```arduino #include <IRKelvinator.h> IRKelvinatorAC kelvir(D1); // IR led controlled by Pin D1. void printState() { // Display the settings. Serial.println("Kelvinator A/C remote is in the following state:"); Serial.printf(" Basic\n Power: %d, Mode: %d, Temp: %dC, Fan Speed: %d\n", kelvir.getPower(), kelvir.getMode(), kelvir.getTemp(), kelvir.getFan()); Serial.printf(" Options\n X-Fan: %d, Light: %d, Ion Filter: %d\n", kelvir.getXFan(), kelvir.getLight(), kelvir.getIonFilter()); Serial.printf(" Swing (V): %d, Swing (H): %d, Turbo: %d, Quiet: %d\n", kelvir.getSwingVertical(), kelvir.getSwingHorizontal(), kelvir.getTurbo(), kelvir.getQuiet()); // Display the encoded IR sequence. unsigned char* ir_code = kelvir.getRaw(); Serial.print("IR Code: 0x"); for (int i = 0; i < KELVINATOR_STATE_LENGTH; i++) Serial.printf("%02X", ir_code[i]); Serial.println(); } void setup(){ kelvir.begin(); Serial.begin(115200); delay(200); // Set up what we want to send. See IRKelvinator.cpp for all the options. // Most things default to off. Serial.println("Default state of the remote."); printState(); Serial.println("Setting desired state for A/C."); kelvir.on(); kelvir.setFan(1); kelvir.setMode(KELVINATOR_COOL); kelvir.setTemp(26); kelvir.setSwingVertical(false); kelvir.setSwingHorizontal(true); kelvir.setXFan(true); kelvir.setIonFilter(false); kelvir.setLight(true); } void loop() { // Now send the IR signal. Serial.println("Sending IR command to A/C ..."); kelvir.send(); printState(); delay(5000); } ```
e8c9436a-113d-49db-a2ba-1147b7416032
{ "language": "Arduino" }
```arduino ``` Add validation of DS18B20 temperature calculation.
```arduino /** * @file CosaDS18B20calc.ino * @version 1.0 * * @section License * Copyright (C) 2015, Mikael Patel * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * @section Description * * This file is part of the Arduino Che Cosa project. */ #include "Cosa/Trace.hh" #include "Cosa/IOStream/Driver/UART.hh" #include "Cosa/Watchdog.hh" void setup() { uart.begin(9600); trace.begin(&uart, PSTR("CosaDS18B20calc: started")); } int32_t iscale(int16_t temp) { bool negative = false; if (temp < 0) { temp = -temp; negative = true; } int32_t res = (temp >> 4) * 1000L + (625 * (temp & 0xf)); return (negative ? -res : res); } float32_t fscale(int16_t temp) { return (temp * 0.0625); } #define CHECK(c,t) trace << PSTR(#c "=") << fscale(t) << endl void loop() { CHECK(+125, 0x07D0); CHECK(+85, 0x0550); CHECK(+25.0625, 0x0191); CHECK(+10.125, 0x00A2); CHECK(+0.5, 0x0008); CHECK(0, 0x0000); CHECK(-0.5, 0xFFF8); CHECK(-10.125, 0xFF5E); CHECK(-25.0625, 0xFE6F); CHECK(-55, 0xFC90); ASSERT(true == false); } ```
e36a8b6a-c01c-4b5a-a201-7c80d20ff6ed
{ "language": "Arduino" }
```arduino ``` Add Kevyn's original clear button sketch
```arduino /* Kevyn McPhail Deeplocal FOB Receiving module code If Button A is pressed the the arduino returns 1, if button 2 is pressed the arduino returns 2 Button A input is PIN 3, Button B input is PIN 2, and the momentary button press input is PIN 4. On the R02A receiving module, Button A is output D2, Button B is output D3, Momentary button press is output VT. Hardware: Sparkfun Pro Micro 5V/16MHz */ void setup(){ Serial.begin(9600); for (int i = 2; i<5; i++){ pinMode(i, INPUT); } } int firstPin; int secondPin; int thirdPin; void loop(){ firstPin = digitalRead(3); secondPin = digitalRead(2); thirdPin = digitalRead(4); if (firstPin == 1 & secondPin == 0 & thirdPin == 1) { Serial.println(1); delay(200); } if (firstPin == 0 & secondPin == 1 & thirdPin == 1) { Serial.println(2); delay(200); } } ```
00a7e2d6-fade-4e2c-929e-f57ff1743928
{ "language": "Arduino" }
```arduino ``` Add file to allow saving arduino output from serial
```arduino /* Header file that allows for writing of data from arduino to pi */ #ifndef serial-print_h #define serial-print_h #include "Arduino.h" import processing.serial.*; Serial mySerial; PrintWriter output; void setup() { mySerial = new Serial( this, Serial.list()[0], 9600 ); output = createWriter( "data.txt" ); } void draw() { if (mySerial.available() > 0 ) { String value = mySerial.readString(); if ( value != null ) { output.println( value ); } } } void keyPressed() { output.flush(); // Writes the remaining data to the file output.close(); // Finishes the file exit(); // Stops the program } #endif ```
773259b1-707b-4fd2-9950-7f0e3755206b
{ "language": "Arduino" }
```arduino ``` Add simple example for the library
```arduino #include <Arduino.h> #include <SPI.h> #include <ssd1351.h> // use this to do Color c = RGB(...) instead of `RGB c = RGB(...)` or ssd1351::LowColor c = RGB(...) // because it's slightly faster and guarantees you won't be sending wrong colours to the display. // Choose color depth - LowColor and HighColor currently supported // typedef ssd1351::LowColor Color; typedef ssd1351::HighColor Color; // Choose display buffering - NoBuffer or SingleBuffer currently supported // auto display = ssd1351::SSD1351<Color, ssd1351::NoBuffer, 128, 96>(); auto display = ssd1351::SSD1351<Color, ssd1351::SingleBuffer, 128, 96>(); bool up = false; int pos = 127; const int particles = 256; int offsets[particles]; int x_pos[particles]; int y_pos[particles]; Color particle_colors[particles]; void setup() { Serial.begin(9600); Serial.println("Booting..."); display.begin(); Serial.println("Display set up."); for (int i = 0; i < particles; i++) { x_pos[i] = random(0, 128); y_pos[i] = random(0, 96); particle_colors[i] = ssd1351::RGB(0, i + 10, i/2 + 10); } } void loop() { unsigned long before = millis(); display.fillScreen(ssd1351::RGB()); Color circleColor = ssd1351::RGB(0, 128, 255); for (int i = 0; i < particles; i++) { offsets[i] += random(-2, 3); display.drawLine( x_pos[i] + offsets[i], y_pos[i] + offsets[i], pos, 80 + sin(pos / 4.0) * 20, particle_colors[i] ); display.drawCircle( x_pos[i] + offsets[i], y_pos[i] + offsets[i], 1, circleColor ); } display.updateScreen(); Serial.println(millis() - before); if (up) { pos++; if (pos >= 127) { up = false; } } else { pos--; if (pos < 0) { up = true; } } } ```
162d6c72-e745-4ce7-ab02-bfc07519b2dc
{ "language": "Arduino" }
```arduino ``` Add example of a custom function in ethernet
```arduino #include <SPI.h> #include <Ethernet.h> #include "VdlkinoEthernet.h" byte mac[] = { 0xDE, 0xAD, 0xBE, 0xEF, 0xFE, 0xED }; IPAddress ip(192,168,1,177); EthernetServer server(80); VdlkinoEthernet vdlkino(14, 6, &server); void url_get_analog_byte(void *block, char *url) { VdlkinoBlock *vblock = (VdlkinoBlock*) block; char buffer[256]; char *pc = buffer; strcpy(pc, url); if (pc[0] == '/') { pc++; } pc = strtok(pc, "/"); if (strcmp(pc, "analogbyte")) { return; } pc = strtok(NULL, "/"); if (pc == NULL) { return; } vblock->pin = atoi(pc); pc = strtok(NULL, "/"); if (pc != NULL) { return; } vblock->oper = 8; vblock->value = 0; vblock->valid = 1; } uint16_t get_analog_byte(void *block) { VdlkinoBlock *vblock = (VdlkinoBlock*) block; return map(analogRead(vblock->pin), 0, 1023, 0, 255); } void setup() { Ethernet.begin(mac, ip); server.begin(); vdlkino.operations[8] = &get_analog_byte; vdlkino.addUrl(&url_get_analog_byte); } void loop() { vdlkino.run(); } ```
b49c0dec-b27f-4214-9fc9-efc29c72eead
{ "language": "Arduino" }
```arduino ``` Add example for using Arduino to read ATU data
```arduino // Copyright 2013 David Turnbull AE9RB // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // This is an example application for Arduino to read band // data from the Peaberry V2 ATU port. Typical usage is to // change the band on your amplifier or switch antennas. // No support circuitry is needed. // Connect a 1/8" jack directly to Arduino pin. // Leave the ring unconnected, it is not used. #define ATU_0_PIN 2 void setup() { Serial.begin(9600); } // This example prints the band number to the serial // port whenever a change is detected. void loop() { static int band = 0; int i = atu_read(); if (i) { if (band != i) { Serial.println(i); } band = i; } } // Returns a non-zero value when the read is complete. int atu_read() { static int state = 5, data[4], previous; static long t; long m = micros(); int i, ret = 0; if (state < 6) switch(state) { default: i = digitalRead(ATU_0_PIN); if (m - t > 7000) state = 5; if (previous == HIGH && i == LOW) { data[state] = m - t; state++; } if (previous == LOW && i == HIGH) { t = m; } previous = i; break; case 4: for (i=0; i<4; i++) { ret <<= 1; if (data[i] > 2750) ret |= 0x01; } //nobreak; case 5: t = m + 50000; digitalWrite(ATU_0_PIN, LOW); state = 6; break; } else if (t - m < 0) switch(state) { case 6: t = m + 50000; digitalWrite(ATU_0_PIN, HIGH); state = 7; break; case 7: t = m + 5000; digitalWrite(ATU_0_PIN, LOW); state = 8; break; case 8: t = m; previous = LOW; state = 0; break; } return ret; } ```
5ec747fb-b94a-4894-801a-2bf059dc4a93
{ "language": "Arduino" }
```arduino ``` Add example for sending custom data packets
```arduino #include <DSPI.h> #include <OpenBCI_32bit_Library.h> #include <OpenBCI_32Bit_Library_Definitions.h> unsigned long timer = 0; byte LEDState = 0; void setup() { // Bring up the OpenBCI Board board.begin(); timer = millis(); LEDState = 1; digitalWrite(OPENBCI_PIN_LED,HIGH); } void loop() { // Downsample if ((millis() - timer) > 20) { // Save new time timer = millis(); sendLEDStatus(); } // Check the serial port for new data if (board.hasDataSerial0()) { // Read one char and process it char c = board.getCharSerial0(); if (c == '0') { // Make the LED turn OFF when a '0' is sent from the PC digitalWrite(OPENBCI_PIN_LED,LOW); LEDState = 0; } else if (c == '1') { // Make the LED turn ON when a '1' is sent from the PC digitalWrite(OPENBCI_PIN_LED,HIGH); LEDState = 1; } } } void sendLEDStatus() { // Must have header byte Serial0.write('A'); // 0x41 1 byte // Write the LED state Serial0.write(LEDState); // 1 byte // Fill the rest with fake data for (int i = 0; i < 30; i++) { Serial0.write(0x00); } // Send a stop byte with an `B` or `1011` in the last nibble to indicate a // different packet type. Serial0.write(0xCB); // 1 byte } ```
a57fcb84-58d4-4a9f-b041-0bf9a51a0faa
{ "language": "Arduino" }
```arduino ``` Test for photoresistor on sensor board made in class.
```arduino #define LED P1_3 #define Sensor P1_4 float reading; void setup() { // put your setup code here, to run once: Serial.begin(9600); pinMode(LED, OUTPUT); pinMode(Sensor, INPUT); } void loop() { // put your main code here, to run repeatedly: // Turn on LED digitalWrite(LED, HIGH); // Read sensor int i; for (i=1;i<10;i++) { reading = analogRead(Sensor); Serial.print(reading); Serial.print(",\n"); delay(100); } // Turn on LED digitalWrite(LED, LOW); for (i=1;i<10;i++) { reading = analogRead(Sensor); Serial.print(reading); Serial.print(",\n"); delay(100); } } ```
057ef2f1-3495-43ac-b658-8b64bb82f323
{ "language": "Arduino" }
```arduino ``` Test of new simple functions in L9110 library
```arduino // Test L9110 library #include <L9110.h> L9110 L9110; // setup pin for LiPo monitor int LiPoMonitor = 2; void setup() { pinMode(LiPoMonitor, INPUT); } void loop() { if (digitalRead(LiPoMonitor) == LOW) { L9110.forward(); delay(3000); L9110.fullStop(); delay(500); L9110.reverse(); delay(3000); L9110.fullStop(); delay(500); L9110.forwardSlow(); delay(3000); L9110.fullStop(); delay(500); L9110.reverseSlow(); delay(3000); L9110.fullStop(); delay(500); L9110.left(); delay(1500); L9110.fullStop(); delay(500); L9110.right(); delay(1500); L9110.fullStop(); delay(3000); } } ```
57315abd-3c6a-4e6e-b47d-f213ef8563e5
{ "language": "Arduino" }
```arduino ``` Add an example for Arduino Micro (Atmega32u4)
```arduino /* Example sketch for the PCF8574 for the purposes of showing how to use the interrupt-pin. Attach the positive lead of an LED to PIN7 on the PCF8574 and the negative lead to GND, a wire from Arduino-pin 13 to pin 3 on the PCF8474, a wire from the int-pin on the PCF8574 to Arduino-pin 7 and wires for SDA and SCL to Arduino-pins 2 and 3, respectively. If all goes well you should see the small blue LED on the ESP-module lighting up and the LED connected to the PCF going off, and vice versa. */ #include <pcf8574_esp.h> /* We need to set up the I2C-bus for the library to use */ #include <Wire.h> // Initialize a PCF8574 at I2C-address 0x20 PCF857x pcf8574(0x20, &Wire); //If you had a PCF8575 instead you'd use the below format //PCF857x pcf8575(0x20, &Wire, true); bool PCFInterruptFlag = false; void PCFInterrupt() { PCFInterruptFlag = true; } void setup() { Serial.begin(115200); delay(5000); Serial.println(F("Firing up...")); pinMode(13, OUTPUT); Wire.begin(); //Set to 400KHz Wire.setClock(400000L); pcf8574.begin(); // Most ready-made PCF8574-modules seem to lack an internal pullup-resistor, so you have to use the MCU-internal one. pinMode(7, INPUT_PULLUP); pcf8574.resetInterruptPin(); attachInterrupt(digitalPinToInterrupt(7), PCFInterrupt, FALLING); } void loop() { if(PCFInterruptFlag){ Serial.println(F("Got an interrupt: ")); if(pcf8574.read(3)==1) Serial.println("Pin 3 is HIGH!"); else Serial.println("Pin 3 is LOW!"); // DO NOTE: When you write LOW to a pin on a PCF8574 it becomes an OUTPUT. // It wouldn't generate an interrupt if you were to connect a button to it that pulls it HIGH when you press the button. // Any pin you wish to use as input must be written HIGH and be pulled LOW to generate an interrupt. pcf8574.write(7, pcf8574.read(3)); PCFInterruptFlag=false; } Serial.println(F("Blink.")); if(digitalRead(13)==HIGH) digitalWrite(13, LOW); else digitalWrite(13, HIGH); delay(1000); } ```
3bf6101d-5256-417e-a658-28a612258464
{ "language": "Arduino" }
```arduino ``` Add low speed serial transmitter example
```arduino void setup() { pinMode(LED_BUILTIN, OUTPUT); SerialUSB.begin(2000000); } void loop() { static int counter = 0; SerialUSB.println(counter, DEC); counter = (counter + 1) % (1 << 8); digitalWrite(LED_BUILTIN, counter >> 7 ? HIGH : LOW); delay(20); } ```
d7e96e98-1004-4605-8c3c-dfbf8dcb5993
{ "language": "Arduino" }
```arduino ``` Add On Chip Calibration example
```arduino #include<CS5490.h> #define rx 11 #define tx 12 /* Choose your board */ /* Arduino UNO and ESP8622 */ CS5490 line(MCLK_default,rx,tx); /* ESP and MEGA (Uses Serial2)*/ //CS5490 line(MCLK_default); void setup() { //Initializing communication with CS5490 //600 is the default baud rate velocity. line.begin(600); //Initializing communication arduino/PC to show results in Monitor Serial Serial.begin(115200); // wait for serial port to connect. Needed for Leonardo only while (!Serial); //Set to continous conversion line.contConv(); delay(100); } void loop() { double foo; Serial.println("\n\nWithout calibration"); line.setDcOffsetI(0); //Reset previous calibration foo = line.getInstI(); Serial.print("DC current value: "); Serial.println(foo, 5); foo = line.getDcOffsetI(); Serial.print("DC offset current value: "); Serial.println(foo, 5); /* -------->Types DCoffset ACoffset Gain -------->Channels Current Voltage CurrentAndVoltage -------->How to use? line.calibrate(type,channel) */ line.calibrate(DCoffset,Current); Serial.println("\n\nCalibrated"); foo = line.getInstI(); Serial.print("DC current value: "); Serial.println(foo, 5); foo = line.getDcOffsetI(); Serial.print("DC offset current value: "); Serial.println(foo, 5); Serial.println("\nReset arduino to see it again... "); while(1); } ```
0a50bf7f-61c5-4096-a3dd-3669aa905832
{ "language": "AsciiDoc" }
```asciidoc This is a small Java library for parsing the Cloud Foundry environment variables (VCAP_SERVICES and so on). // the first line of this file is used as a description in the POM, so keep it short and sweet! Download from Bintray: image::https://api.bintray.com/packages/pivotal-labs-london/maven/cf-env/images/download.svg[link="https://bintray.com/pivotal-labs-london/maven/cf-env/_latestVersion"] Build with Gradle: -------------------------------------- ./gradlew build -------------------------------------- Release with Gradle: -------------------------------------- # you probably want to make these changes manually rather than like this sed -i -e "s/^version = .*/version = 'x.y.z'/" build.gradle echo -e "bintrayUser=pivotal-labs-london\nbintrayKey=..." >gradle.properties ./gradlew bintrayUpload -------------------------------------- ``` Document how to use the Bintray repo
```asciidoc This is a small Java library for parsing the Cloud Foundry environment variables (VCAP_SERVICES and so on). // the first line of this file is used as a description in the POM, so keep it short and sweet! Download from Bintray: image::https://api.bintray.com/packages/pivotal-labs-london/maven/cf-env/images/download.svg[link="https://bintray.com/pivotal-labs-london/maven/cf-env/_latestVersion"] Use as a dependency in your build: -------------------------------------- repositories { jcenter() // or mavenCentral, for transitive dependencies maven { url = 'http://dl.bintray.com/pivotal-labs-london/maven/' } } dependencies { compile group: 'io.pivotal.labs', name: 'cf-env', version: '0.0.1' } -------------------------------------- We hope to have the artifact available via JCenter soon, but until then, please use the repository on Bintray. Build with Gradle: -------------------------------------- ./gradlew build -------------------------------------- Release with Gradle: -------------------------------------- # you probably want to make these changes manually rather than like this sed -i -e "s/^version = .*/version = 'x.y.z'/" build.gradle echo -e "bintrayUser=pivotal-labs-london\nbintrayKey=..." >gradle.properties ./gradlew bintrayUpload -------------------------------------- ```
55d40634-c6b5-4a1d-b155-154c3fd82084
{ "language": "AsciiDoc" }
```asciidoc = Auxly image:http://img.shields.io/:license-mit-blue.svg["License", link="https://github.com/jeffrimko/Qprompt/blob/master/LICENSE"] image:https://travis-ci.org/jeffrimko/Auxly.svg?branch=master["Build Status"] == Introduction This project provides a Python 2.7/3.x library for common tasks especially when writing shell-like scripts. Some of the functionality overlaps with the standard library but the API is slightly modified. == Status The status of this project is **pre-alpha**. This project is not yet suitable for use other than testing. == Requirements Auxly should run on any Python 2.7/3.x interpreter without additional dependencies. == Usage === The following are basic examples of Auxly (all examples can be found https://github.com/jeffrimko/Auxly/tree/master/examples[here]): - https://github.com/jeffrimko/Auxly/blob/master/examples/delete_1.py[examples/delete_1.py] - Deletes all PYC files in the project. ---- include::examples\delete_1.py[] ---- ``` Undo include change in readme.
```asciidoc = Auxly image:http://img.shields.io/:license-mit-blue.svg["License", link="https://github.com/jeffrimko/Qprompt/blob/master/LICENSE"] image:https://travis-ci.org/jeffrimko/Auxly.svg?branch=master["Build Status"] == Introduction This project provides a Python 2.7/3.x library for common tasks especially when writing shell-like scripts. Some of the functionality overlaps with the standard library but the API is slightly modified. == Status The status of this project is **pre-alpha**. This project is not yet suitable for use other than testing. == Requirements Auxly should run on any Python 2.7/3.x interpreter without additional dependencies. == Usage The following are basic examples of Auxly (all examples can be found https://github.com/jeffrimko/Auxly/tree/master/examples[here]): - https://github.com/jeffrimko/Auxly/blob/master/examples/delete_1.py[examples/delete_1.py] - Deletes all PYC files in the project. ```
31bd26be-42bf-4347-ae0f-7f16d45d210e
{ "language": "AsciiDoc" }
```asciidoc // Module included in the following assemblies: // // * serverless/configuring-knative-serving-autoscaling.adoc [id="configuring-scale-bounds-knative_{context}"] = Configuring scale bounds Knative Serving autoscaling The `minScale` and `maxScale` annotations can be used to configure the minimum and maximum number of Pods that can serve applications. These annotations can be used to prevent cold starts or to help control computing costs. minScale:: If the `minScale` annotation is not set, Pods will scale to zero (or to 1 if enable-scale-to-zero is false per the `ConfigMap`). maxScale:: If the `maxScale` annotation is not set, there will be no upper limit for the number of Pods created. `minScale` and `maxScale` can be configured as follows in the revision template: [source,yaml] ---- spec: template: metadata: autoscaling.knative.dev/minScale: "2" autoscaling.knative.dev/maxScale: "10" ---- Using these annotations in the revision template will propagate this confguration to `PodAutoscaler` objects. [NOTE] ==== These annotations apply for the full lifetime of a revision. Even when a revision is not referenced by any route, the minimal Pod count specified by `minScale` will still be provided. Keep in mind that non-routeable revisions may be garbage collected, which enables Knative to reclaim the resources. ==== ``` Fix spec.template.metadata.annotations for min and max scale example
```asciidoc // Module included in the following assemblies: // // * serverless/configuring-knative-serving-autoscaling.adoc [id="configuring-scale-bounds-knative_{context}"] = Configuring scale bounds Knative Serving autoscaling The `minScale` and `maxScale` annotations can be used to configure the minimum and maximum number of Pods that can serve applications. These annotations can be used to prevent cold starts or to help control computing costs. minScale:: If the `minScale` annotation is not set, Pods will scale to zero (or to 1 if enable-scale-to-zero is false per the `ConfigMap`). maxScale:: If the `maxScale` annotation is not set, there will be no upper limit for the number of Pods created. `minScale` and `maxScale` can be configured as follows in the revision template: [source,yaml] ---- spec: template: metadata: annotations: autoscaling.knative.dev/minScale: "2" autoscaling.knative.dev/maxScale: "10" ---- Using these annotations in the revision template will propagate this confguration to `PodAutoscaler` objects. [NOTE] ==== These annotations apply for the full lifetime of a revision. Even when a revision is not referenced by any route, the minimal Pod count specified by `minScale` will still be provided. Keep in mind that non-routeable revisions may be garbage collected, which enables Knative to reclaim the resources. ==== ```
07e6517c-273c-4a1a-9b5a-ca230b28526b
{ "language": "AsciiDoc" }
```asciidoc = Packetbeat reference :libbeat: http://www.elastic.co/guide/en/beats/libbeat/1.0.0-rc1 :version: 1.0.0-rc1 include::./overview.asciidoc[] include::./gettingstarted.asciidoc[] include::./configuration.asciidoc[] include::./command-line.asciidoc[] include::./capturing.asciidoc[] include::./https.asciidoc[] include::./fields.asciidoc[] include::./thrift.asciidoc[] include::./windows.asciidoc[] include::./kibana3.asciidoc[] include::./filtering.asciidoc[] include::./troubleshooting.asciidoc[] include::./new_protocol.asciidoc[] ``` Use master version in docs
```asciidoc = Packetbeat reference :libbeat: http://www.elastic.co/guide/en/beats/libbeat/master :version: master include::./overview.asciidoc[] include::./gettingstarted.asciidoc[] include::./configuration.asciidoc[] include::./command-line.asciidoc[] include::./capturing.asciidoc[] include::./https.asciidoc[] include::./fields.asciidoc[] include::./thrift.asciidoc[] include::./windows.asciidoc[] include::./kibana3.asciidoc[] include::./filtering.asciidoc[] include::./troubleshooting.asciidoc[] include::./new_protocol.asciidoc[] ```
b09445f2-674c-496c-8fb6-e01ea6754e4b
{ "language": "AsciiDoc" }
```asciidoc = AsciiBinder image:https://badge.fury.io/rb/ascii_binder.svg["Gem Version", link="https://badge.fury.io/rb/ascii_binder"] AsciiBinder is an AsciiDoc-based system for authoring and publishing closely related documentation sets from a single source. == Learn More * See the http://www.asciibinder.org[homepage]. * Have a gander at the http://www.asciibinder.org/latest/welcome/[AsciiBinder documentation]. * Or just take the https://rubygems.org/gems/ascii_binder[ascii_binder Ruby Gem] for a spin. The AsciiBinder system was initially developed for https://github.com/openshift/openshift-docs[OpenShift documentation], but has been revised to work for documenting a wide variety of complex, multi-versioned software projects. == Contributing We are using the https://github.com/redhataccess/ascii_binder/issues[Issues] page to track bugs and feature ideas on the code, so have a look and feel free to ask questions there. You can also chat with us on IRC at FreeNode, http://webchat.freenode.net/?randomnick=1&channels=asciibinder&uio=d4[#asciibinder] channel, or on Twitter - https://twitter.com/AsciiBinder[@AsciiBinder]. == License The gem is available as open source under the terms of the http://opensource.org/licenses/MIT[MIT License]. ``` Update docs link to point to docs repo
```asciidoc = AsciiBinder image:https://badge.fury.io/rb/ascii_binder.svg["Gem Version", link="https://badge.fury.io/rb/ascii_binder"] AsciiBinder is an AsciiDoc-based system for authoring and publishing closely related documentation sets from a single source. == Learn More * Have a gander at the https://github.com/redhataccess/ascii_binder-docs/blob/master/welcome/index.adoc[AsciiBinder documentation]. * Or just take the https://rubygems.org/gems/ascii_binder[ascii_binder Ruby Gem] for a spin. The AsciiBinder system was initially developed for https://github.com/openshift/openshift-docs[OpenShift documentation], but has been revised to work for documenting a wide variety of complex, multi-versioned software projects. == Contributing We are using the https://github.com/redhataccess/ascii_binder/issues[Issues] page to track bugs and feature ideas on the code, so have a look and feel free to ask questions there. You can also chat with us on IRC at FreeNode, http://webchat.freenode.net/?randomnick=1&channels=asciibinder&uio=d4[#asciibinder] channel, or on Twitter - https://twitter.com/AsciiBinder[@AsciiBinder]. == License The gem is available as open source under the terms of the http://opensource.org/licenses/MIT[MIT License]. ```
8225b4d7-9aaa-42a4-af86-99315141d94f
{ "language": "AsciiDoc" }
```asciidoc :figure-caption!: image::https://travis-ci.org/mmjmanders/ng-iban.svg?branch=master[title="travis status", alt="travis status", link="https://travis-ci.org/mmjmanders/ng-iban"] image::https://app.wercker.com/status/eb4337041c62e162c5dd7af43122647c/m[title="wercker status", alt="wercker status", link="https://app.wercker.com/project/bykey/eb4337041c62e162c5dd7af43122647c"] = ng-iban - validate input fields as IBAN The goal is to provide an easy way to validate an input field as an IBAN number with https://angularjs.org/[AngularJS]. From version `0.4.0` the module uses https://github.com/arhs/iban.js[iban.js] for validation. == Usage First add * `AngularJS` * `ng-iban` to your HTML file. Make sure you require `mm.iban` as a dependency of your AngularJS module. == Installation `bower install ng-iban` === directive [source,html] ---- <input type="text" ng-model="iban" ng-iban/> ---- To use this directive the `ngModel` directive must also be used because this directive depends on it. ``` Update documentation for NPM support
```asciidoc :figure-caption!: image::https://travis-ci.org/mmjmanders/ng-iban.svg?branch=master[title="travis status", alt="travis status", link="https://travis-ci.org/mmjmanders/ng-iban"] image::https://app.wercker.com/status/eb4337041c62e162c5dd7af43122647c/m[title="wercker status", alt="wercker status", link="https://app.wercker.com/project/bykey/eb4337041c62e162c5dd7af43122647c"] = ng-iban - validate input fields as IBAN The goal is to provide an easy way to validate an input field as an IBAN number with https://angularjs.org/[AngularJS]. From version `0.4.0` the module uses https://github.com/arhs/iban.js[iban.js] for validation. == Installation === Bower `bower install ng-iban` === NPM `npm install ng-iban` === Other Download file `dist/ng-iban.mni.js`. == Usage Add `mm.iban` as a dependency of your AngularJS module. === directive [source,html] ---- <input type="text" ng-model="iban" ng-iban/> ---- To use this directive the `ngModel` directive must also be used because this directive depends on it. ```
5dd427a2-fdb1-4db6-90e0-0398501996b9
{ "language": "AsciiDoc" }
```asciidoc = Infinispan Cluster Manager image:https://vertx.ci.cloudbees.com/buildStatus/icon?job=vert.x3-infinispan["Build Status",link="https://vertx.ci.cloudbees.com/view/vert.x-3/job/vert.x3-infinispan/"] This is a cluster manager implementation for Vert.x that uses http://infinispan.org[Infinispan]. Please see the in-source asciidoc documentation or the main documentation on the web-site for a full description of this component: * link:http://vertx.io/docs/vertx-infinispan/java/[web-site docs] * link:src/main/asciidoc/java/index.adoc[in-source docs] ``` Revert "Revert "Revert "Revert "Test trigger on push""""
```asciidoc = Infinispan Cluster Manager image:https://vertx.ci.cloudbees.com/buildStatus/icon?job=vert.x3-infinispan["Build Status",link="https://vertx.ci.cloudbees.com/view/vert.x-3/job/vert.x3-infinispan/"] This is a cluster manager implementation for Vert.x that uses http://infinispan.org[Infinispan]. Please see the in-source asciidoc documentation or the main documentation on the web-site for a full description of this component: * link:http://vertx.io/docs/vertx-infinispan/java/[web-site docs] * link:src/main/asciidoc/java/index.adoc[in-source docs] -- will remove -- ```
1b68553e-b8f1-40f4-a3e4-57b1abac930c
{ "language": "AsciiDoc" }
```asciidoc [[validator-further-reading]] == Further reading Last but not least, a few pointers to further information. A great source for examples is the Bean Validation TCK which is available for anonymous access on https://github.com/beanvalidation/beanvalidation-tck/[GitHub]. In particular the TCK's https://github.com/beanvalidation/beanvalidation-tck/tree/master/tests[tests] might be of interest. {bvSpecUrl}[The JSR 380] specification itself is also a great way to deepen your understanding of Bean Validation and Hibernate Validator. If you have any further questions about Hibernate Validator or want to share some of your use cases, have a look at the http://community.jboss.org/en/hibernate/validator[Hibernate Validator Wiki], the https://forum.hibernate.org/viewforum.php?f=9[Hibernate Validator Forum] and the https://stackoverflow.com/questions/tagged/hibernate-validator[Hibernate Validator tag on Stack Overflow]. In case you would like to report a bug use https://hibernate.atlassian.net/projects/HV/[Hibernate's Jira] instance. Feedback is always welcome! ``` Update the links to the forum
```asciidoc [[validator-further-reading]] == Further reading Last but not least, a few pointers to further information. A great source for examples is the Bean Validation TCK which is available for anonymous access on https://github.com/beanvalidation/beanvalidation-tck/[GitHub]. In particular the TCK's https://github.com/beanvalidation/beanvalidation-tck/tree/master/tests[tests] might be of interest. {bvSpecUrl}[The JSR 380] specification itself is also a great way to deepen your understanding of Bean Validation and Hibernate Validator. If you have any further questions about Hibernate Validator or want to share some of your use cases, have a look at the http://community.jboss.org/en/hibernate/validator[Hibernate Validator Wiki], the https://discourse.hibernate.org/c/hibernate-validator[Hibernate Validator Forum] and the https://stackoverflow.com/questions/tagged/hibernate-validator[Hibernate Validator tag on Stack Overflow]. In case you would like to report a bug use https://hibernate.atlassian.net/projects/HV/[Hibernate's Jira] instance. Feedback is always welcome! ```
6264b06f-ff6a-4b93-ad05-898e5ecbfd0c
{ "language": "AsciiDoc" }
```asciidoc == Introduction to Regression Test Mode Bitcoin 0.9 and later include support for Regression Test Mode (aka RegTest mode). RegTest mode creates a single node Bitcoin "network" that can confirm blocks upon command. (RegTest mode can also be used to create small, multi-node networks and even to simulate blockchain reorganizations.) For example the following command will generate 101 blocks ./bitcoin-cli -regtest setgenerate true 101 And yes, you get the newly mined coins. They can't be spent anywhere, but they're great for testing. The best documentation of RegTest mode that I've seen so far is https://bitcoinj.github.io/testing[How to test applications] on the new https://bitcoinj.github.io[Bitcoinj website]. Other Links:: * http://geraldkaszuba.com/creating-your-own-experimental-bitcoin-network/[Creating your own experimental Bitcoin network] * https://github.com/gak/docker-bitcoin-regtest[docker-bitcoin-regtest] == Simple Demo of RegTest mode with Bash Scripts These are some really rough Bash scripts that can drive bitcoind in RegTest mode. Procedure:: * Make sure Bitcoin Core 0.9 or later is installed and in your path. * Run the server script ./server.sh & * Run the client setup script to mine some coins to get started: ./setup-client.sh * Run the client script (repeat as desired) ./client.sh * A directory named +regtest-datadir+ is created in the current directory. ``` Remove reference to deleted scripts.
```asciidoc == Introduction to Regression Test Mode Bitcoin 0.9 and later include support for Regression Test Mode (aka RegTest mode). RegTest mode creates a single node Bitcoin "network" that can confirm blocks upon command. (RegTest mode can also be used to create small, multi-node networks and even to simulate blockchain reorganizations.) For example the following command will generate 101 blocks ./bitcoin-cli -regtest setgenerate true 101 And yes, you get the newly mined coins. They can't be spent anywhere, but they're great for testing. The best documentation of RegTest mode that I've seen so far is https://bitcoinj.github.io/testing[How to test applications] on the new https://bitcoinj.github.io[Bitcoinj website]. Other Links:: * http://geraldkaszuba.com/creating-your-own-experimental-bitcoin-network/[Creating your own experimental Bitcoin network] * https://github.com/gak/docker-bitcoin-regtest[docker-bitcoin-regtest] ```
0d0418f7-34fd-4579-8b32-498d10a87b53
{ "language": "AsciiDoc" }
```asciidoc // Copyright 2017 the original author or authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. = Documentation licenses [[sec:gradle_documentation]] == Gradle Documentation _Copyright © 2007-2018 Gradle, Inc._ Gradle build tool source code is open and licensed under the link:https://github.com/gradle/gradle/blob/master/LICENSE[Apache License 2.0]. Gradle user manual and DSL references are licensed under link:http://creativecommons.org/licenses/by-nc-sa/4.0/[Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License]. ``` Add license information to docs clarifying build scan plugin license
```asciidoc // Copyright 2017 the original author or authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. [[licenses]] = License Information [[sec:gradle_documentation]] == Gradle Documentation _Copyright © 2007-2018 Gradle, Inc._ Gradle build tool source code is open-source and licensed under the link:https://github.com/gradle/gradle/blob/master/LICENSE[Apache License 2.0]. Gradle user manual and DSL references are licensed under link:http://creativecommons.org/licenses/by-nc-sa/4.0/[Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License]. [[licenses:build_scan_plugin]] == Gradle Build Scan Plugin Use of the link:https://scans.gradle.com/plugin/[build scan plugin] is subject to link:https://gradle.com/legal/terms-of-service/[Gradle's Terms of Service]. ```
765ca276-d312-4267-9a77-8d99aa151368
{ "language": "AsciiDoc" }
```asciidoc [id='monitoring'] = Monitoring include::modules/common-attributes.adoc[] :context: monitoring toc::[] {product-title} uses the Prometheus open source monitoring system. The stack built around Prometheus provides {product-title} cluster monitoring by default. It also provides custom-configured application monitoring as a technology preview. The cluster monitoring stack is only supported for monitoring {product-title} clusters. [id='cluster-monitoring'] == Cluster monitoring include::modules/monitoring-monitoring-overview.adoc[leveloffset=+1] include::monitoring/configuring-monitoring-stack.adoc[leveloffset=+1] include::modules/monitoring-configuring-etcd-monitoring.adoc[leveloffset=+1] include::modules/monitoring-accessing-prometheus-alertmanager-grafana.adoc[leveloffset=+1] [id='application-monitoring'] == Application monitoring You can do custom metrics scraping for your applications. This is done using the Prometheus Operator and a custom Prometheus instance. [IMPORTANT] ==== Application monitoring is a technology preview. Exposing custom metrics will change without the consent of the user of the cluster. ==== include::modules/monitoring-configuring-cluster-for-application-monitoring.adoc[leveloffset=+1] include::modules/monitoring-configuring-monitoring-for-application.adoc[leveloffset=+1] include::modules/monitoring-exposing-application-metrics-for-horizontal-pod-autoscaling.adoc[leveloffset=+1] ``` Remove the deprecated etcd section
```asciidoc [id='monitoring'] = Monitoring include::modules/common-attributes.adoc[] :context: monitoring toc::[] {product-title} uses the Prometheus open source monitoring system. The stack built around Prometheus provides {product-title} cluster monitoring by default. It also provides custom-configured application monitoring as a technology preview. The cluster monitoring stack is only supported for monitoring {product-title} clusters. [id='cluster-monitoring'] == Cluster monitoring include::modules/monitoring-monitoring-overview.adoc[leveloffset=+1] include::monitoring/configuring-monitoring-stack.adoc[leveloffset=+1] include::modules/monitoring-accessing-prometheus-alertmanager-grafana.adoc[leveloffset=+1] [id='application-monitoring'] == Application monitoring You can do custom metrics scraping for your applications. This is done using the Prometheus Operator and a custom Prometheus instance. [IMPORTANT] ==== Application monitoring is a technology preview. Exposing custom metrics will change without the consent of the user of the cluster. ==== include::modules/monitoring-configuring-cluster-for-application-monitoring.adoc[leveloffset=+1] include::modules/monitoring-configuring-monitoring-for-application.adoc[leveloffset=+1] include::modules/monitoring-exposing-application-metrics-for-horizontal-pod-autoscaling.adoc[leveloffset=+1] ```
7ffb195c-9ff1-4088-865f-5f1ebaadffa3
{ "language": "AsciiDoc" }
```asciidoc // Module included in the following assemblies: // // * cli_reference/openshift_developer_cli/installing-odo.adoc [id="installing-odo-on-linux"] = Installing {odo-title} on Linux == Binary installation ---- # curl -L https://mirror.openshift.com/pub/openshift-v4/clients/odo/latest/odo-darwin-amd64 -o /usr/local/bin/odo # chmod +x /usr/local/bin/odo ---- == Tarball installation ---- # sh -c 'curl -L https://mirror.openshift.com/pub/openshift-v4/clients/odo/latest/odo-linux-amd64.tar.gz | gzip -d > /usr/local/bin/odo' # chmod +x /usr/local/bin/odo ----``` Fix url to odo linux binary
```asciidoc // Module included in the following assemblies: // // * cli_reference/openshift_developer_cli/installing-odo.adoc [id="installing-odo-on-linux"] = Installing {odo-title} on Linux == Binary installation ---- # curl -L https://mirror.openshift.com/pub/openshift-v4/clients/odo/latest/odo-linux-amd64 -o /usr/local/bin/odo # chmod +x /usr/local/bin/odo ---- == Tarball installation ---- # sh -c 'curl -L https://mirror.openshift.com/pub/openshift-v4/clients/odo/latest/odo-linux-amd64.tar.gz | gzip -d > /usr/local/bin/odo' # chmod +x /usr/local/bin/odo ----```
fd4551fa-42bc-48a0-b048-ed44885b0674
{ "language": "AsciiDoc" }
```asciidoc
ifdef::env-github[]
:imagesdir: https://github.com/Moose2Model/Moose2Model/blob/master/Documentation/images/
endif::[]
:toc:
:toc-placement!:

toc::[]

This documentation follows the arc42 template for architecture documentation (https://arc42.org/).

1 Introduction and Goals
========================

1.1 Requirements Overview
-------------------------
- Provide diagrams for developers that can be easily kept correct
- Reduces the cognitive load of developers who work in complex software systems
- Supports only diagrams that show the dependencies between components of a software
- Supports dependencies between entities of code that are more detailed than a class (method, attribute, ...)
- Works in the moment best with models that are extracted by the SAP2Moose project
- Shall support all models that are compatible with Moose (http://moosetechnology.org/)

1.2 Quality Goals
-----------------
- Shows all dependencies between elements in a diagram
- Shows all elements that should be in a diagram

1.3 Stake Holders
-----------------

.Stake Holders
|===
| Role/Name |Expectations

|Developer
|Build diagrams where the customization is not lost when they are regenerated with new information. Build diagrams that are sufficiently detailed to support the development.

|Software Architect
|Have a tool to compare the planned architecture with the realized architecture
|===
``` Add arc 42 Architecture Constraints
```asciidoc
ifdef::env-github[]
:imagesdir: https://github.com/Moose2Model/Moose2Model/blob/master/Documentation/images/
endif::[]
:toc:
:toc-placement!:

toc::[]

This documentation follows the arc42 template for architecture documentation (https://arc42.org/).

1 Introduction and Goals
========================

1.1 Requirements Overview
-------------------------
- Provide diagrams for developers that can be easily kept correct
- Reduces the cognitive load of developers who work in complex software systems
- Supports only diagrams that show the dependencies between components of a software
- Supports dependencies between entities of code that are more detailed than a class (method, attribute, ...)
- Works in the moment best with models that are extracted by the SAP2Moose project
- Shall support all models that are compatible with Moose (http://moosetechnology.org/)

1.2 Quality Goals
-----------------
- Shows all dependencies between elements in a diagram
- Shows all elements that should be in a diagram

1.3 Stake Holders
-----------------

.Stake Holders
|===
| Role/Name |Expectations

|Developer
|Build diagrams where the customization is not lost when they are regenerated with new information. Build diagrams that are sufficiently detailed to support the development.

|Software Architect
|Have a tool to compare the planned architecture with the realized architecture
|===

2 Architecture Constraints
==========================
- Easy to install
```
26e14994-1595-4e6b-ab4c-136d1814bb06
{ "language": "AsciiDoc" }
```asciidoc = Forum (free support) :awestruct-layout: base :showtitle: == Usage questions If you have a question about OptaPlanner, just ask our friendly community: * Ask on http://www.jboss.org/drools/lists[the Drools user mailing list] (recommended). * Or ask on http://stackoverflow.com/questions/tagged/optaplanner[StackOverflow]. Please follow these recommendations when posting a question: * Get to the point. Keep it as short as possible. * Include _relevant_ technical details (stacktrace, short code snippet, ...). * Be polite, friendly and clear. Reread your question before posting it. * Be patient. This isn't link:product.html[paid support]. == Development discussions If you've link:../code/sourceCode.html[build OptaPlanner from source] and you would like to improve it, then come talk with us: * Join us on http://www.jboss.org/drools/lists[the Drools developer mailing list]. * And link:chat.html[chat with us] (recommended). ``` Split optaplanner's dev list away from drools's mailing list
```asciidoc = Forum :awestruct-layout: base :showtitle: == Usage questions If you have a question about OptaPlanner, just ask our friendly community: * *http://stackoverflow.com/questions/tagged/optaplanner[Ask a usage question on StackOverflow.]* * To start a discussion, use https://groups.google.com/forum/#!forum/optaplanner-dev[the OptaPlanner developer forum]. Please follow these recommendations when posting a question or a discussion: * Get to the point. Keep it as short as possible. * Include _relevant_ technical details (stacktrace, short code snippet, ...). * Be polite, friendly and clear. Reread your question before posting it. * Be patient. This isn't link:product.html[paid support]. == Development discussions If you've link:../code/sourceCode.html[build OptaPlanner from source] and want to improve something, come talk with us: * Join https://groups.google.com/forum/#!forum/optaplanner-dev[the OptaPlanner developer google group]. ** Mail directly to the group via mailto:[email protected][[email protected]]. * And link:chat.html[chat with us] (recommended). ** Or ping us on link:socialMedia.html[any social media]. ```
5d93afe6-11e7-413d-abcb-c58597451ff9
{ "language": "AsciiDoc" }
```asciidoc [[apim-kubernetes-overview]] = Kubernetes plugin :page-sidebar: apim_3_x_sidebar :page-permalink: apim/3.x/apim_kubernetes_overview.html :page-folder: apim/kubernetes :page-layout: apim3x :page-liquid: [label label-version]#New in version 3.7# == Overview APIM 3.7.0 introduces a Kubernetes plugin for APIM Gateway allowing the deployment of APIs using https://kubernetes.io/docs/concepts/extend-kubernetes/api-extension/custom-resources/[Custom Resource Definitions (CRDs)^]. APIs deployed using CRDs are not visible through APIM Console. WARNING: This plugin is currently in Alpha version; the API key policy isn't available for APIs deployed using this plugin. You can find more detailed information about the plugin in the following sections: * link:/apim/3.x/apim_kubernetes_quick_start.html[Quick Start] * link:/apim/3.x/apim_kubernetes_installation.html[How to install] * link:/apim/3.x/apim_kubernetes_custom_resources.html[Custom Resources] * link:/apim/3.x/apim_kubernetes_admission_hook.html[Admission hook] ``` Fix APIM version to 3.13
```asciidoc [[apim-kubernetes-overview]] = Kubernetes plugin :page-sidebar: apim_3_x_sidebar :page-permalink: apim/3.x/apim_kubernetes_overview.html :page-folder: apim/kubernetes :page-layout: apim3x :page-liquid: [label label-version]#New in version 3.13# == Overview APIM 3.13 introduces a Kubernetes plugin for APIM Gateway allowing the deployment of APIs using https://kubernetes.io/docs/concepts/extend-kubernetes/api-extension/custom-resources/[Custom Resource Definitions (CRDs)^]. APIs deployed using CRDs are not visible through APIM Console. WARNING: This plugin is currently in Alpha version; the API key policy isn't available for APIs deployed using this plugin. You can find more detailed information about the plugin in the following sections: * link:/apim/3.x/apim_kubernetes_quick_start.html[Quick Start] * link:/apim/3.x/apim_kubernetes_installation.html[How to install] * link:/apim/3.x/apim_kubernetes_custom_resources.html[Custom Resources] * link:/apim/3.x/apim_kubernetes_admission_hook.html[Admission hook] ```
bc124938-7615-404d-af2e-82ef23eaeee2
{ "language": "AsciiDoc" }
```asciidoc = CLI Connector Component :doctitle: CLI Connector :shortname: cli-connector :artifactid: camel-cli-connector :description: Runtime adapter connecting with Camel CLI :since: 3.19 :supportlevel: Preview *Since Camel {since}* The camel-cli-connector allows the Camel CLI to be able to manage running Camel integrations. Currently, only a local connector is provided, which means that the Camel CLI can only be managing local running Camel integrations. These integrations can be using different runtimes such as Camel Main, Camel Spring Boot or Camel Quarkus etc. == Auto-detection from classpath To use this implementation all you need to do is to add the `camel-cli-connector` dependency to the classpath, and Camel should auto-detect this on startup and log as follows: [source,text] ---- Local CLI Connector started ---- ``` Add spring-boot link in doc
```asciidoc = CLI Connector Component :doctitle: CLI Connector :shortname: cli-connector :artifactid: camel-cli-connector :description: Runtime adapter connecting with Camel CLI :since: 3.19 :supportlevel: Preview //Manually maintained attributes :camel-spring-boot-name: cli-connector *Since Camel {since}* The camel-cli-connector allows the Camel CLI to be able to manage running Camel integrations. Currently, only a local connector is provided, which means that the Camel CLI can only be managing local running Camel integrations. These integrations can be using different runtimes such as Camel Main, Camel Spring Boot or Camel Quarkus etc. == Auto-detection from classpath To use this implementation all you need to do is to add the `camel-cli-connector` dependency to the classpath, and Camel should auto-detect this on startup and log as follows: [source,text] ---- Local CLI Connector started ---- include::spring-boot:partial$starter.adoc[] ```
f3c42560-c4c2-45bb-8a20-bae53e079084
{ "language": "AsciiDoc" }
```asciidoc [[release-notes]] == Release Notes :numbered!: include::release-notes-5.0.0-ALPHA.adoc[] include::release-notes-5.0.0-M1.adoc[] include::release-notes-5.0.0-M2.adoc[] include::release-notes-5.0.0-M3.adoc[] include::release-notes-5.0.0-M4.adoc[] include::release-notes-5.0.0-M5.adoc[] include::release-notes-5.0.0-M6.adoc[] include::release-notes-5.0.0-RC1.adoc[] include::release-notes-5.0.0-RC2.adoc[] include::release-notes-5.0.0-RC3.adoc[] include::release-notes-5.0.0.adoc[] include::release-notes-5.1.0-M1.adoc[] :numbered: ``` Order release notes from newest to oldest
```asciidoc [[release-notes]] == Release Notes :numbered!: include::release-notes-5.1.0-M1.adoc[] include::release-notes-5.0.0.adoc[] include::release-notes-5.0.0-RC3.adoc[] include::release-notes-5.0.0-RC2.adoc[] include::release-notes-5.0.0-RC1.adoc[] include::release-notes-5.0.0-M6.adoc[] include::release-notes-5.0.0-M5.adoc[] include::release-notes-5.0.0-M4.adoc[] include::release-notes-5.0.0-M3.adoc[] include::release-notes-5.0.0-M2.adoc[] include::release-notes-5.0.0-M1.adoc[] include::release-notes-5.0.0-ALPHA.adoc[] :numbered: ```
48e9e065-3d52-4b4c-b461-ac742697c7fe
{ "language": "AsciiDoc" }
```asciidoc == Fusioninventory Plugin https://coveralls.io/r/fusioninventory/fusioninventory-for-glpi[image:https://coveralls.io/repos/fusioninventory/fusioninventory-for-glpi/badge.svg] This plugin makes GLPI to process various types of tasks for Fusioninventory agents: * Computer inventory * Network discovery * Network (SNMP) inventory * Software deployment * VMWare ESX host remote inventory For further information and documentation, please check http://www.fusioninventory.org . If you want to report bugs or check for development status, you can check http://forge.fusioninventory.org . == Third-party code * PluginFusioninventoryFindFiles() is copyright http://rosettacode.org/wiki/Walk_a_directory/Recursively#PHP[rosettacode.org] and made available under GNU Free Documentation License. == Third-party icons and images Some icons used in the project comes from the following set of graphics licensed: * Dortmund is copyright by http://pc.de/icons/[PC.DE] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. * Fugue Icons is copyright by http://p.yusukekamiyamane.com/[Yusuke Kamiyamame] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. ``` Add travis badge + fix coverage badge
```asciidoc == Fusioninventory Plugin image:https://travis-ci.org/fusioninventory/fusioninventory-for-glpi.svg?branch=master["Build Status", link="https://travis-ci.org/fusioninventory/fusioninventory-for-glpi"] image:https://coveralls.io/repos/fusioninventory/fusioninventory-for-glpi/badge.svg["Coverage Status", link="https://coveralls.io/r/fusioninventory/fusioninventory-for-glpi"] This plugin makes GLPI to process various types of tasks for Fusioninventory agents: * Computer inventory * Network discovery * Network (SNMP) inventory * Software deployment * VMWare ESX host remote inventory For further information and documentation, please check http://www.fusioninventory.org . If you want to report bugs or check for development status, you can check http://forge.fusioninventory.org . == Third-party code * PluginFusioninventoryFindFiles() is copyright http://rosettacode.org/wiki/Walk_a_directory/Recursively#PHP[rosettacode.org] and made available under GNU Free Documentation License. == Third-party icons and images Some icons used in the project comes from the following set of graphics licensed: * Dortmund is copyright by http://pc.de/icons/[PC.DE] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. * Fugue Icons is copyright by http://p.yusukekamiyamane.com/[Yusuke Kamiyamame] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. ```
c330a932-f2d7-4d38-bc78-0b4191adda7a
{ "language": "AsciiDoc" }
```asciidoc == Fusioninventory Plugin https://coveralls.io/r/fusioninventory/fusioninventory-for-glpi[image:https://coveralls.io/repos/fusioninventory/fusioninventory-for-glpi/badge.svg] This plugin makes GLPI to process various types of tasks for Fusioninventory agents: * Computer inventory * Network discovery * Network (SNMP) inventory * Software deployment * VMWare ESX host remote inventory For further information and documentation, please check http://www.fusioninventory.org . If you want to report bugs or check for development status, you can check http://forge.fusioninventory.org . == Third-party code * PluginFusioninventoryFindFiles() is copyright http://rosettacode.org/wiki/Walk_a_directory/Recursively#PHP[rosettacode.org] and made available under GNU Free Documentation License. == Third-party icons and images Some icons used in the project comes from the following set of graphics licensed: * Dortmund is copyright by http://pc.de/icons/[PC.DE] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. * Fugue Icons is copyright by http://p.yusukekamiyamane.com/[Yusuke Kamiyamame] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. ``` Add travis badge + fix coverage badge
```asciidoc == Fusioninventory Plugin image:https://travis-ci.org/fusioninventory/fusioninventory-for-glpi.svg?branch=master["Build Status", link="https://travis-ci.org/fusioninventory/fusioninventory-for-glpi"] image:https://coveralls.io/repos/fusioninventory/fusioninventory-for-glpi/badge.svg["Coverage Status", link="https://coveralls.io/r/fusioninventory/fusioninventory-for-glpi"] This plugin makes GLPI to process various types of tasks for Fusioninventory agents: * Computer inventory * Network discovery * Network (SNMP) inventory * Software deployment * VMWare ESX host remote inventory For further information and documentation, please check http://www.fusioninventory.org . If you want to report bugs or check for development status, you can check http://forge.fusioninventory.org . == Third-party code * PluginFusioninventoryFindFiles() is copyright http://rosettacode.org/wiki/Walk_a_directory/Recursively#PHP[rosettacode.org] and made available under GNU Free Documentation License. == Third-party icons and images Some icons used in the project comes from the following set of graphics licensed: * Dortmund is copyright by http://pc.de/icons/[PC.DE] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. * Fugue Icons is copyright by http://p.yusukekamiyamane.com/[Yusuke Kamiyamame] and made available under a http://creativecommons.org/licenses/by/3.0/deed[Creative Commons Attribution 3.0 License]. ```
4443490b-d7f3-4aee-8215-03042cd22a33
{ "language": "AsciiDoc" }
```asciidoc = Hawkular Android Client This repository contains the source code for the Hawkular Android application. == License * http://www.apache.org/licenses/LICENSE-2.0.html[Apache Version 2.0] == Building ifdef::env-github[] [link=https://travis-ci.org/hawkular/hawkular-android-client] image:https://travis-ci.org/hawkular/hawkular-android-client.svg["Build Status", link="https://travis-ci.org/hawkular/hawkular-android-client"] endif::[] You will need JDK 1.7+ installed. Gradle, Android SDK and all dependencies will be downloaded automatically. ---- $ ./gradlew clean assembleDebug ---- ``` Update readme with Wiki reference.
```asciidoc = Hawkular Android Client This repository contains the source code for the Hawkular Android application. == License * http://www.apache.org/licenses/LICENSE-2.0.html[Apache Version 2.0] == Building ifdef::env-github[] [link=https://travis-ci.org/hawkular/hawkular-android-client] image:https://travis-ci.org/hawkular/hawkular-android-client.svg["Build Status", link="https://travis-ci.org/hawkular/hawkular-android-client"] endif::[] You will need JDK 1.7+ installed. Gradle, Android SDK and all dependencies will be downloaded automatically. ---- $ ./gradlew clean assembleDebug ---- == Reading There are some documents on the link:../../wiki[Wiki], including API overview, UI mockups and instructions on running necessary servers for using the client in common and push notifications specifically. ```
82d1d537-a7d9-4deb-beed-5a07c825e0d4
{ "language": "AsciiDoc" }
```asciidoc = Spring Boot and Two DataSources This project demonstrates how to use two `DataSource` s with Spring Boot 2.0. It utilizes: * Spring Data https://github.com/spring-projects/spring-data-jpa[JPA] / https://github.com/spring-projects/spring-data-rest[REST] * https://github.com/flyway/flyway[Flyway] migrations for the two `DataSource` s * Separate Hibernate properties for each `DataSource` defined in the application.yml * https://github.com/thymeleaf/thymeleaf[Thymeleaf] 3 * https://github.com/DataTables/DataTablesSrc[DataTables] * Unit tests for components Note: It may take a few seconds for the app to start if no one has not accessed it recently``` Adjust readme to Spring Boot 2.1
```asciidoc = Spring Boot and Two DataSources This project demonstrates how to use two `DataSource` s with Spring Boot 2.1. It utilizes: * Spring Data https://github.com/spring-projects/spring-data-jpa[JPA] / https://github.com/spring-projects/spring-data-rest[REST] * https://github.com/flyway/flyway[Flyway] migrations for the two `DataSource` s * Separate Hibernate properties for each `DataSource` defined in the application.yml * https://github.com/thymeleaf/thymeleaf[Thymeleaf] 3 * https://github.com/DataTables/DataTablesSrc[DataTables] * Unit tests for components Note: It may take a few seconds for the app to start if no one has not accessed it recently```
b347801f-e86b-4f35-88a7-345ab19383a2
{ "language": "AsciiDoc" }
```asciidoc = Snoop - A Discovery Service for Java EE Snoop is an experimental registration and discovery service for Java EE based microservices. == Getting Started . Start the link:snoop-service.adoc[Snoop Service] . link:service-registration.adoc[Service Registration] . link:service-discovery.adoc[Service Discovery] == Maven . Released artifacts are available in link:http://search.maven.org/#search%7Cga%7C1%7Csnoop[Maven Central] . Snapshots configuration: <repositories> <repository> <id>agilejava-snapshots</id> <url>http://nexus.agilejava.eu/content/groups/public</url> <snapshots> <enabled>true</enabled> </snapshots> </repository> </repositories> == Examples - link:https://github.com/ivargrimstad/snoop-samples[snoop-samples@GitHub] - link:https://github.com/arun-gupta/microservices[https://github.com/arun-gupta/microservices] == FAQ - link:FAQ.adoc[Frequently Asked Questions] ``` Add reference to classloader issue and workaround
```asciidoc = Snoop - A Discovery Service for Java EE Snoop is an experimental registration and discovery service for Java EE based microservices. == Getting Started . Start the link:snoop-service.adoc[Snoop Service] . link:service-registration.adoc[Service Registration] . link:service-discovery.adoc[Service Discovery] == Maven . Released artifacts are available in link:http://search.maven.org/#search%7Cga%7C1%7Csnoop[Maven Central] . Snapshots configuration: <repositories> <repository> <id>agilejava-snapshots</id> <url>http://nexus.agilejava.eu/content/groups/public</url> <snapshots> <enabled>true</enabled> </snapshots> </repository> </repositories> == Classloader Issue - link:classloader-issue.adoc[Description and Workaround] == Examples - link:https://github.com/ivargrimstad/snoop-samples[snoop-samples@GitHub] - link:https://github.com/arun-gupta/microservices[https://github.com/arun-gupta/microservices] == FAQ - link:FAQ.adoc[Frequently Asked Questions] ```
ffb33ffa-105f-463e-bf00-f06278041009
{ "language": "AsciiDoc" }
```asciidoc = CLI Connector Component :doctitle: CLI Connector :shortname: cli-connector :artifactid: camel-cli-connector :description: Runtime adapter connecting with Camel CLI :since: 3.19 :supportlevel: Preview //Manually maintained attributes :camel-spring-boot-name: cli-connector *Since Camel {since}* The camel-cli-connector allows the Camel CLI to be able to manage running Camel integrations. Currently, only a local connector is provided, which means that the Camel CLI can only be managing local running Camel integrations. These integrations can be using different runtimes such as Camel Main, Camel Spring Boot or Camel Quarkus etc. == Auto-detection from classpath To use this implementation all you need to do is to add the `camel-cli-connector` dependency to the classpath, and Camel should auto-detect this on startup and log as follows: [source,text] ---- Local CLI Connector started ---- include::spring-boot:partial$starter.adoc[] ``` Revert "Add spring-boot link in doc"
```asciidoc = CLI Connector Component :doctitle: CLI Connector :shortname: cli-connector :artifactid: camel-cli-connector :description: Runtime adapter connecting with Camel CLI :since: 3.19 :supportlevel: Preview *Since Camel {since}* The camel-cli-connector allows the Camel CLI to be able to manage running Camel integrations. Currently, only a local connector is provided, which means that the Camel CLI can only be managing local running Camel integrations. These integrations can be using different runtimes such as Camel Main, Camel Spring Boot or Camel Quarkus etc. == Auto-detection from classpath To use this implementation all you need to do is to add the `camel-cli-connector` dependency to the classpath, and Camel should auto-detect this on startup and log as follows: [source,text] ---- Local CLI Connector started ---- ```
3202506a-3fbe-4049-bff3-743ef35717d8
{ "language": "AsciiDoc" }
```asciidoc Copyright 2016 higherfrequencytrading.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.``` Migrate to Apache v2.0 license
```asciidoc == Copyright 2016 higherfrequencytrading.com Licensed under the *Apache License, Version 2.0* (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.```
515e7afa-8626-4dda-b948-f79583520891
{ "language": "AsciiDoc" }
```asciidoc [id="olm-webhooks"] = Managing admission webhooks in Operator Lifecycle Manager include::modules/common-attributes.adoc[] :context: olm-webhooks toc::[] Validating and mutating admission webhooks allow Operator authors to intercept, modify, and accept or reject resources before they are handled by the Operator controller. Operator Lifecycle Manager (OLM) can manage the lifecycle of these webhooks when they are shipped alongside your Operator. include::modules/olm-defining-csv-webhooks.adoc[leveloffset=+1] include::modules/olm-webhook-considerations.adoc[leveloffset=+1] [id="olm-webhooks-additional-resources"] == Additional resources * xref:../architecture/admission-plug-ins.adoc#admission-webhook-types_admission-plug-ins[Types of webhook admission plug-ins] ``` Edit to OLM webhook workflow
```asciidoc [id="olm-webhooks"] = Managing admission webhooks in Operator Lifecycle Manager include::modules/common-attributes.adoc[] :context: olm-webhooks toc::[] Validating and mutating admission webhooks allow Operator authors to intercept, modify, and accept or reject resources before they are saved to the object store and handled by the Operator controller. Operator Lifecycle Manager (OLM) can manage the lifecycle of these webhooks when they are shipped alongside your Operator. include::modules/olm-defining-csv-webhooks.adoc[leveloffset=+1] include::modules/olm-webhook-considerations.adoc[leveloffset=+1] [id="olm-webhooks-additional-resources"] == Additional resources * xref:../architecture/admission-plug-ins.adoc#admission-webhook-types_admission-plug-ins[Types of webhook admission plug-ins] ```
41cb4190-0e05-4efd-a777-39801511828a
{ "language": "AsciiDoc" }
```asciidoc [[breaking-changes]] = Breaking changes [partintro] -- This section discusses the changes that you need to be aware of when migrating your application from one version of Elasticsearch to another. As a general rule: * Migration between major versions -- e.g. `1.x` to `2.x` -- requires a <<restart-upgrade,full cluster restart>>. * Migration between minor versions -- e.g. `1.x` to `1.y` -- can be performed by <<rolling-upgrades,upgrading one node at a time>>. See <<setup-upgrade>> for more info. -- include::migrate_3_0.asciidoc[] include::migrate_2_1.asciidoc[] include::migrate_2_2.asciidoc[] include::migrate_2_0.asciidoc[] include::migrate_1_6.asciidoc[] include::migrate_1_4.asciidoc[] include::migrate_1_0.asciidoc[] ``` Fix version order for breaking changes docs
```asciidoc [[breaking-changes]] = Breaking changes [partintro] -- This section discusses the changes that you need to be aware of when migrating your application from one version of Elasticsearch to another. As a general rule: * Migration between major versions -- e.g. `1.x` to `2.x` -- requires a <<restart-upgrade,full cluster restart>>. * Migration between minor versions -- e.g. `1.x` to `1.y` -- can be performed by <<rolling-upgrades,upgrading one node at a time>>. See <<setup-upgrade>> for more info. -- include::migrate_3_0.asciidoc[] include::migrate_2_2.asciidoc[] include::migrate_2_1.asciidoc[] include::migrate_2_0.asciidoc[] include::migrate_1_6.asciidoc[] include::migrate_1_4.asciidoc[] include::migrate_1_0.asciidoc[] ```
ece79c69-780e-4f38-a954-1880aca84ba8
{ "language": "AsciiDoc" }
```asciidoc = Edge Documentation Edge is a starting point for creating Clojure projects. Not sure if Edge is for you? See <<why-edge.adoc#,Why Edge?>>. == Get Started Are you new to Edge? This is the place to start! . link:https://clojure.org/guides/getting_started[Install clj] (<<windows.adoc#,Additional notes for installing on Windows>>) . <<editor.adoc#,Set up your editor for Clojure>> . <<setup.adoc#,Set up Edge for your project>> . <<dev-guide.adoc#,Developing on Edge>> == Using Edge //. Configuration //. Components * <<dev-guide.adoc#,Developing on Edge>> * <<uberjar.adoc#,Producing an Uberjar>> * <<elastic-beanstalk.adoc#,Using the Elastic Beanstalk Quickstart>> * <<socket-repl.adoc#,Setting up a socket REPL>> == The Edge Project * <<why-edge.adoc#,Why Edge?>> * <<guidelines.adoc#,Contributing Guidelines>> //* Getting help //* How to get involved //* License ``` Remove Why Edge? link from preamble
```asciidoc = Edge Documentation Edge is a starting point for creating Clojure projects of all sizes. == Get Started Are you new to Edge? This is the place to start! . link:https://clojure.org/guides/getting_started[Install clj] (<<windows.adoc#,Additional notes for installing on Windows>>) . <<editor.adoc#,Set up your editor for Clojure>> . <<setup.adoc#,Set up Edge for your project>> . <<dev-guide.adoc#,Developing on Edge>> == Using Edge //. Configuration //. Components * <<dev-guide.adoc#,Developing on Edge>> * <<uberjar.adoc#,Producing an Uberjar>> * <<elastic-beanstalk.adoc#,Using the Elastic Beanstalk Quickstart>> * <<socket-repl.adoc#,Setting up a socket REPL>> == The Edge Project * <<why-edge.adoc#,Why Edge?>> * <<guidelines.adoc#,Contributing Guidelines>> //* Getting help //* How to get involved //* License ```
01d6f9cc-18be-437e-a252-ab4c86b0c9e7
{ "language": "AsciiDoc" }
```asciidoc [[overview]] == Overview The goal of this document is to provide comprehensive reference documentation for both programmers writing tests and extension authors. WARNING: Work in progress! === Supported Java Versions JUnit 5 only supports Java 8 and above. However, you can still test classes compiled with lower versions. == Installation Snapshot artifacts are deployed to Sonatype's {snapshot-repo}[snapshots repository]. [[dependency-metadata]] === Dependency Metadata * *Group ID*: `org.junit` * *Version*: `{junit-version}` * *Artifact IDs*: ** `junit-commons` ** `junit-console` ** `junit-engine-api` ** `junit-gradle` ** `junit-launcher` ** `junit4-engine` ** `junit4-runner` ** `junit5-api` ** `junit5-engine` ** `surefire-junit5` See also: {snapshot-repo}/org/junit/ === JUnit 5 Sample Projects The {junit5-samples-repo}[`junit5-samples`] repository hosts a collection of sample projects based on JUnit 5. You'll find the respective `build.gradle` and `pom.xml` in the projects below. * For Gradle, check out the `{junit5-gradle-consumer}` project. * For Maven, check out the `{junit5-maven-consumer}` project. ``` Apply Spotless to User Guide
```asciidoc [[overview]] == Overview The goal of this document is to provide comprehensive reference documentation for both programmers writing tests and extension authors. WARNING: Work in progress! === Supported Java Versions JUnit 5 only supports Java 8 and above. However, you can still test classes compiled with lower versions. == Installation Snapshot artifacts are deployed to Sonatype's {snapshot-repo}[snapshots repository]. [[dependency-metadata]] === Dependency Metadata * *Group ID*: `org.junit` * *Version*: `{junit-version}` * *Artifact IDs*: ** `junit-commons` ** `junit-console` ** `junit-engine-api` ** `junit-gradle` ** `junit-launcher` ** `junit4-engine` ** `junit4-runner` ** `junit5-api` ** `junit5-engine` ** `surefire-junit5` See also: {snapshot-repo}/org/junit/ === JUnit 5 Sample Projects The {junit5-samples-repo}[`junit5-samples`] repository hosts a collection of sample projects based on JUnit 5. You'll find the respective `build.gradle` and `pom.xml` in the projects below. * For Gradle, check out the `{junit5-gradle-consumer}` project. * For Maven, check out the `{junit5-maven-consumer}` project. ```
49653919-fea2-47b6-aa89-4252e2810f9e
{ "language": "AsciiDoc" }
```asciidoc [[structure:Default]] [role=group,includesConstraints="structure:packagesShouldConformToTheMainBuildingBlocks"] All the blackboxes above should correspond to Java packages. Those packages should have no dependencies to other packages outside themselves but for the support or shared package: [[structure:packagesShouldConformToTheMainBuildingBlocks]] [source,cypher,role=constraint,requiresConcepts="structure:configPackages,structure:supportingPackages"] .Top level packages should conform to the main building blocks. ---- MATCH (a:Artifact {type: 'jar'}) MATCH (a) -[:CONTAINS]-> (p1:Package) -[:DEPENDS_ON]-> (p2:Package) <-[:CONTAINS]- (a) WHERE not p1:Config and not (p1) -[:CONTAINS]-> (p2) and not p2:Support and not p1.fqn = 'ac.simons.biking2.summary' RETURN p1, p2 ----``` Use 'Main'-Label instead of type-attribute.
```asciidoc [[structure:Default]] [role=group,includesConstraints="structure:packagesShouldConformToTheMainBuildingBlocks"] All the blackboxes above should correspond to Java packages. Those packages should have no dependencies to other packages outside themselves but for the support or shared package: [[structure:packagesShouldConformToTheMainBuildingBlocks]] [source,cypher,role=constraint,requiresConcepts="structure:configPackages,structure:supportingPackages"] .Top level packages should conform to the main building blocks. ---- MATCH (a:Main:Artifact) MATCH (a) -[:CONTAINS]-> (p1:Package) -[:DEPENDS_ON]-> (p2:Package) <-[:CONTAINS]- (a) WHERE not p1:Config and not (p1) -[:CONTAINS]-> (p2) and not p2:Support and not p1.fqn = 'ac.simons.biking2.summary' RETURN p1, p2 ----```
16980831-8f76-401e-8cd1-44aeaf0da74c
{ "language": "AsciiDoc" }
```asciidoc [[release-notes-5.5.0-M2]] == 5.5.0-M2️ *Date of Release:* ❓ *Scope:* ❓ For a complete list of all _closed_ issues and pull requests for this release, consult the link:{junit5-repo}+/milestone/37?closed=1+[5.5 M2] milestone page in the JUnit repository on GitHub. [[release-notes-5.5.0-M2-junit-platform]] === JUnit Platform ==== Bug Fixes * ❓ ==== Deprecations and Breaking Changes * ❓ ==== New Features and Improvements * ❓ [[release-notes-5.5.0-M2-junit-jupiter]] === JUnit Jupiter ==== Bug Fixes * Parameterized tests no longer throw an `ArrayStoreException` when creating human-readable test names. ==== Deprecations and Breaking Changes * ❓ ==== New Features and Improvements * New `booleans` property in `ValueSource`. [[release-notes-5.5.0-M2-junit-vintage]] === JUnit Vintage ==== Bug Fixes * ❓ ==== Deprecations and Breaking Changes * ❓ ==== New Features and Improvements * ❓ ``` Clean up 5.5 M2 release notes
```asciidoc [[release-notes-5.5.0-M2]] == 5.5.0-M2️ *Date of Release:* ❓ *Scope:* ❓ For a complete list of all _closed_ issues and pull requests for this release, consult the link:{junit5-repo}+/milestone/37?closed=1+[5.5 M2] milestone page in the JUnit repository on GitHub. [[release-notes-5.5.0-M2-junit-platform]] === JUnit Platform ==== Bug Fixes * ❓ ==== Deprecations and Breaking Changes * ❓ ==== New Features and Improvements * ❓ [[release-notes-5.5.0-M2-junit-jupiter]] === JUnit Jupiter ==== Bug Fixes * Parameterized tests no longer throw an `ArrayStoreException` when creating human-readable test names. ==== Deprecations and Breaking Changes * ❓ ==== New Features and Improvements * ❓ [[release-notes-5.5.0-M2-junit-vintage]] === JUnit Vintage ==== Bug Fixes * ❓ ==== Deprecations and Breaking Changes * ❓ ==== New Features and Improvements * ❓ ```
02225a02-77ca-465b-9720-bd10c4b0e798
{ "language": "AsciiDoc" }
```asciidoc [[analysis-keyword-marker-tokenfilter]] === Keyword Marker Token Filter Protects words from being modified by stemmers. Must be placed before any stemming filters. [cols="<,<",options="header",] |======================================================================= |Setting |Description |`keywords` |A list of words to use. |`keywords_path` |A path (either relative to `config` location, or absolute) to a list of words. |`ignore_case` |Set to `true` to lower case all words first. Defaults to `false`. |======================================================================= Here is an example: [source,js] -------------------------------------------------- index : analysis : analyzer : myAnalyzer : type : custom tokenizer : standard filter : [lowercase, protwods, porter_stem] filter : protwods : type : keyword_marker keywords_path : analysis/protwords.txt -------------------------------------------------- ``` Fix typo in sample json
```asciidoc [[analysis-keyword-marker-tokenfilter]] === Keyword Marker Token Filter Protects words from being modified by stemmers. Must be placed before any stemming filters. [cols="<,<",options="header",] |======================================================================= |Setting |Description |`keywords` |A list of words to use. |`keywords_path` |A path (either relative to `config` location, or absolute) to a list of words. |`ignore_case` |Set to `true` to lower case all words first. Defaults to `false`. |======================================================================= Here is an example: [source,js] -------------------------------------------------- index : analysis : analyzer : myAnalyzer : type : custom tokenizer : standard filter : [lowercase, protwords, porter_stem] filter : protwords : type : keyword_marker keywords_path : analysis/protwords.txt -------------------------------------------------- ```
966e93cc-4899-4988-acbf-c8886798cb1e
{ "language": "AsciiDoc" }
```asciidoc = Authors and contributors - Simon Cruanes (`companion_cube`) - Drup (Gabriel Radanne) - Jacques-Pascal Deplaix - Nicolas Braud-Santoni - Whitequark (Peter Zotov) - hcarty (Hezekiah M. Carty) - struktured (Carmelo Piccione) - Bernardo da Costa - Vincent Bernardoff (vbmithr) - Emmanuel Surleau (emm) - Guillaume Bury (guigui) - JP Rodi - Florian Angeletti (@octachron) - Johannes Kloos - Geoff Gole (@gsg) - Roma Sokolov (@little-arhat) - Malcolm Matalka (`orbitz`) - David Sheets (@dsheets) - Glenn Slotte (glennsl) - @LemonBoy - Leonid Rozenberg (@rleonid) - Bikal Gurung (@bikalgurung) - Fabian Hemmer (copy) - Maciej Woś (@lostman) - Orbifx (Stavros Polymenis) - Rand (@rand00) - Dave Aitken (@actionshrimp) - Etienne Millon (@emillon) - Christopher Zimmermann (@madroach) - Jules Aguillon (@julow) ``` Add myself to the authors file
```asciidoc = Authors and contributors - Simon Cruanes (`companion_cube`) - Drup (Gabriel Radanne) - Jacques-Pascal Deplaix - Nicolas Braud-Santoni - Whitequark (Peter Zotov) - hcarty (Hezekiah M. Carty) - struktured (Carmelo Piccione) - Bernardo da Costa - Vincent Bernardoff (vbmithr) - Emmanuel Surleau (emm) - Guillaume Bury (guigui) - JP Rodi - Florian Angeletti (@octachron) - Johannes Kloos - Geoff Gole (@gsg) - Roma Sokolov (@little-arhat) - Malcolm Matalka (`orbitz`) - David Sheets (@dsheets) - Glenn Slotte (glennsl) - @LemonBoy - Leonid Rozenberg (@rleonid) - Bikal Gurung (@bikalgurung) - Fabian Hemmer (copy) - Maciej Woś (@lostman) - Orbifx (Stavros Polymenis) - Rand (@rand00) - Dave Aitken (@actionshrimp) - Etienne Millon (@emillon) - Christopher Zimmermann (@madroach) - Jules Aguillon (@julow) - Metin Akat (@loxs) ```
f7816531-ac3f-4ffa-a28a-cae537795a4f
{ "language": "AsciiDoc" }
```asciidoc = OmniJ Developer's Guide Sean Gilligan v0.1, July 30, 2015: Early draft :numbered: :toc: :toclevels: 3 :linkattrs: Paragraph TBD. == Introduction to OmniJ This section is TBD. For now the project http://github.com/OmniLayer/OmniJ/README.adoc[README] is the best place to get started. == JSON-RPC Clients [plantuml, diagram-classes, svg] .... skinparam packageStyle Rect skinparam shadowing false hide empty members namespace com.msgilligan.bitcoin.rpc { class RPCClient RPCClient <|-- class DynamicRPCClient << Groovy >> RPCClient <|-- BitcoinClient BitcoinClient <|-- class BitcoinCLIClient << Groovy >> } namespace foundation.omni.rpc { com.msgilligan.bitcoin.rpc.BitcoinClient <|-- OmniClient OmniClient <|-- OmniExtendedClient OmniExtendedClient <|-- class OmniCLIClient << Groovy >> } .... ``` Add images directory attribute to devguide
```asciidoc = OmniJ Developer's Guide Sean Gilligan v0.1, July 30, 2015: Early draft :numbered: :toc: :toclevels: 3 :linkattrs: :imagesdir: images Paragraph TBD. == Introduction to OmniJ This section is TBD. For now the project http://github.com/OmniLayer/OmniJ/README.adoc[README] is the best place to get started. == JSON-RPC Clients [plantuml, diagram-classes, svg] .... skinparam packageStyle Rect skinparam shadowing false hide empty members namespace com.msgilligan.bitcoin.rpc { class RPCClient RPCClient <|-- class DynamicRPCClient << Groovy >> RPCClient <|-- BitcoinClient BitcoinClient <|-- class BitcoinCLIClient << Groovy >> } namespace foundation.omni.rpc { com.msgilligan.bitcoin.rpc.BitcoinClient <|-- OmniClient OmniClient <|-- OmniExtendedClient OmniExtendedClient <|-- class OmniCLIClient << Groovy >> } .... ```
3487d39d-208a-40bb-84a0-adf8a64f649e
{ "language": "AsciiDoc" }
```asciidoc = Groundwork :toc: :source-highlighter: pygments link:index.html[back to index page] == Laying the Groundwork To redeploy RecordTrac, you need support from key stakeholders _within_ government. The administrator or elected official in charge of overseeing public records request must agree to use this system, and instruct their colleagues to do so. RecordTrac assumes there is a contact for a given municipality or department within the municipality to handle public records requests. If a government agency has no process at all in place, but is interested in using the system, they could start with one ‘champion’ that is knowledgeable about who has access to different records. The champion can then route requests to the proper parties within government who may have the documents or information a requester needs. == Best Practices to Consider RecordTrac is flexible and could complement almost any governmental agency's process for fulfilling records requests. There are however, best practices a governmental agency should adopt to really leverage the power of RecordTrac. Below is an example lifted from the City of Oakland: * Track all public records requests through RecordTrac, even if you originally received it over the phone, by email, fax, or mail. * Don't reveal sensitive information in your message or upload documents that haven't been thoroughly redacted. Everything you do on the site is immediately viewable to the public. * Upload scanned copies of the [redacted] records online instead of sending the document only to the requester. This prevents you from answering the same public records request multiple times. It also provides proof you responded to the request and exactly what you provided. * Communicate with everyone through RecordTrac. Only take conversations offline if it involves confidential or sensitive information. * Review requests no later than two business days after you receive them. 
This ensures the person responsible for fulfilling a records request gets it in time if it needs to be re-routed to him or her. ``` Put Best practices in a separate section.
```asciidoc = Groundwork :toc: :source-highlighter: pygments link:index.html[back to index page] == Laying the Groundwork To redeploy RecordTrac, you need support from key stakeholders _within_ government. The administrator or elected official in charge of overseeing public records request must agree to use this system, and instruct their colleagues to do so. RecordTrac assumes there is a contact for a given municipality or department within the municipality to handle public records requests. If a government agency has no process at all in place, but is interested in using the system, they could start with one ‘champion’ that is knowledgeable about who has access to different records. The champion can then route requests to the proper parties within government who may have the documents or information a requester needs. ```
97733960-4656-4184-abcc-2b5b8954f4ec
{ "language": "AsciiDoc" }
```asciidoc = Qprompt == Introduction This project provides a Python 2.x library that allows the user to quickly create CLI prompts for user input. The main features are the following: - Simple multi-entry menus. - Prompt for yes/no response. - Prompt for integer response. - Prompt for float response. - Optional default value. - Optional validity check. - Should work on any platform without additional dependencies. == Status Currently, this project is **under active development**. The contents of the repository should be considered unstable during active development. == Requirements Qprompt should run on any Python 2.x interpreter without additional dependencies. == Installation Qprompt can be installed with pip using the following command: `pip install qprompt` Additional, Qprompt can be installed from source by running: `python setup.py install` == Examples Example of basic information prompting: -------- include::examples/ask_1.py[] -------- Example of menu usage: -------- include::examples/menu_1.py[] -------- == Similar The following projects are similar and may be worth checking out: - https://github.com/Sleft/cliask[cliask] - https://github.com/aventurella/promptly[Promptly] - https://github.com/tylerdave/prompter[prompter] - https://github.com/magmax/python-inquirer[python-inquirer] ``` Update to the example section.
```asciidoc = Qprompt == Introduction This project provides a Python 2.x library that allows the user to quickly create CLI prompts for user input. The main features are the following: - Simple multi-entry menus. - Prompt for yes/no response. - Prompt for integer response. - Prompt for float response. - Optional default value. - Optional validity check. - Should work on any platform without additional dependencies. == Status Currently, this project is **under active development**. The contents of the repository should be considered unstable during active development. == Requirements Qprompt should run on any Python 2.x interpreter without additional dependencies. == Installation Qprompt can be installed with pip using the following command: `pip install qprompt` Additional, Qprompt can be installed from source by running: `python setup.py install` == Examples The following are basic examples of Qprompt (all examples can be found https://github.com/jeffrimko/Qprompt/tree/master/examples[here]): - https://github.com/jeffrimko/Qprompt/blob/master/examples/ask_1.py[`examples/ask_1.py`] - Basic info prompting. - https://github.com/jeffrimko/Qprompt/blob/master/examples/menu_1.py[`examples/menu_1.py`] - Basic menu usage. == Similar The following projects are similar and may be worth checking out: - https://github.com/Sleft/cliask[cliask] - https://github.com/aventurella/promptly[Promptly] - https://github.com/tylerdave/prompter[prompter] - https://github.com/magmax/python-inquirer[python-inquirer] ```
430cb0dc-c156-4c86-aa0e-2ebc06210652
{ "language": "AsciiDoc" }
```asciidoc *--map-http-status* 'TEXT':: Map non success HTTP response codes to exit codes other than 1. e.g. "--map-http-satus 403=0,404=0" would exit with 0 even if a 403 or 404 http error code was received. Valid exit codes are 0,1,50-99. include::format_option.adoc[] include::jmespath_option.adoc[] include::help_option.adoc[] include::verbose_option.adoc[] ``` Remove nested includes from adoc
```asciidoc *--map-http-status* 'TEXT':: Map non success HTTP response codes to exit codes other than 1. e.g. "--map-http-satus 403=0,404=0" would exit with 0 even if a 403 or 404 http error code was received. Valid exit codes are 0,1,50-99. *-F, --format* '[json|text]':: Set the output format for stdout. Defaults to "text". *--jq, --jmespath* 'EXPR':: Supply a JMESPath expression to apply to json output. Takes precedence over any specified '--format' and forces the format to be json processed by this expression. + A full specification of the JMESPath language for querying JSON structures may be found at https://jmespath.org/ *-h, --help*:: Show help text for this command. *-v, --verbose*:: Control the level of output. + Use -v or --verbose to show warnings and any additional text output. + Use -vv to add informative logging. + Use -vvv to add debug logging and full stack on any errors. (equivalent to -v --debug) ```
53f692fb-415b-4a57-8868-badedda05036
{ "language": "AsciiDoc" }
```asciidoc // Module included in the following assemblies: // // administering_a_cluster/dedicated-admin-role.adoc [id="dedicated-managing-dedicated-administrators_{context}"] = Managing {product-title} administrators Administrator roles are managed using a `dedicated-admins` group on the cluster. Existing members of this group can edit membership via the link:https://cloud.redhat.com/openshift[{cloud-redhat-com}] site. [id="dedicated-administrators-adding-user_{context}"] == Adding a user . Navigate to the *Cluster Details* page and *Users* tab. . Click the *Add user* button. (first user only) . Enter the user name and select the group (*dedicated-admins*) . Click the *Add* button. [id="dedicated-administrators-removing-user_{context}"] == Removing a user . Navigate to the *Cluster Details* page and *Users* tab. . Click the *X* to the right of the user / group combination to be deleted.. ``` Update documentation to current interface on cloud.rh.c
```asciidoc // Module included in the following assemblies: // // administering_a_cluster/dedicated-admin-role.adoc [id="dedicated-managing-dedicated-administrators_{context}"] = Managing {product-title} administrators Administrator roles are managed using a `dedicated-admins` group on the cluster. Existing members of this group can edit membership via the link:https://cloud.redhat.com/openshift[{cloud-redhat-com}] site. [id="dedicated-administrators-adding-user_{context}"] == Adding a user . Navigate to the *Cluster Details* page and *Access Control* tab. . Click the *Add user* button. (first user only) . Enter the user name and select the group (*dedicated-admins*) . Click the *Add* button. [id="dedicated-administrators-removing-user_{context}"] == Removing a user . Navigate to the *Cluster Details* page and *Access Control* tab. . Click the 3 vertical dots to the right of the user / group combination to show a menu, then click on *Delete*. ```
cf5bde9f-c339-48f9-b49b-4ae4ba869b52
{ "language": "AsciiDoc" }
```asciidoc 2020/06/18: Concuerror integration has been added. It is currently minimal but usable. Experimentation and feedback is welcome. 2020/11/30: Support for publishing Hex releases and docs has been added. It is currently experimental. Feedback is more than welcome. 2022/03/25: The -Wrace_conditions Dialyzer flag was removed as it is no longer available starting from OTP 25. 2022/??/??: Relx has been updated to v4. Relx v4 is no longer an escript, therefore breaking changes were introduced. The `RELX`, `RELX_URL` and `RELX_OPTS` variables were removed. The `relx` project must be added as a `DEPS`, `BUILD_DEPS` or `REL_DEPS` dependency to enable building releases. For example: `REL_DEPS = relx`. Relx itself has had some additional changes: the `start` command has been replaced by `daemon`, and configuration defaults have changed so that you may need to add the following to your relx.config file: ``` erlang {dev_mode, false}. {include_erts, true}. ``` ``` Set date for breaking Relx 4 change
```asciidoc 2020/06/18: Concuerror integration has been added. It is currently minimal but usable. Experimentation and feedback is welcome. 2020/11/30: Support for publishing Hex releases and docs has been added. It is currently experimental. Feedback is more than welcome. 2022/03/25: The -Wrace_conditions Dialyzer flag was removed as it is no longer available starting from OTP 25. 2022/05/20: Relx has been updated to v4. Relx v4 is no longer an escript, therefore breaking changes were introduced. The `RELX`, `RELX_URL` and `RELX_OPTS` variables were removed. The `relx` project must be added as a `DEPS`, `BUILD_DEPS` or `REL_DEPS` dependency to enable building releases. For example: `REL_DEPS = relx`. Relx itself has had some additional changes: the `start` command has been replaced by `daemon`, and configuration defaults have changed so that you may need to add the following to your relx.config file: ``` erlang {dev_mode, false}. {include_erts, true}. ``` ```
8904bd69-569f-4fd7-ba81-a1fdec0e1252
{ "language": "AsciiDoc" }
```asciidoc == Time Zones & Offset Extract a zone from a `java.time.ZonedDateTime`: ==== [source.code,clojure] ---- (t/zone (t/zoned-date-time "2000-01-01T00:00:00Z[Europe/Paris]")) ---- [source.code,clojure] ---- (t/zone) ---- ==== Create a `java.time.ZonedDateTime` in a particular time zone: ==== [source.code,clojure] ---- (t/in (t/instant "2000-01-01T00:00") "Australia/Darwin") ---- ==== === TBD : offsets``` Add offset examples to Zones
```asciidoc == Time Zones & Offset Extract a zone from a `java.time.ZonedDateTime`: ==== [source.code,clojure] ---- (t/zone (t/zoned-date-time "2000-01-01T00:00:00Z[Europe/Paris]")) ---- [source.code,clojure] ---- (t/zone) ---- ==== Create a `java.time.ZonedDateTime` in a particular time zone: ==== [source.code,clojure] ---- (t/in (t/instant "2000-01-01T00:00") "Australia/Darwin") ---- ==== Give the `OffsetDateTime` instead of `ZonedDateTime`: ==== [source.code,clojure] ---- (t/offset-date-time (t/zoned-date-time "2000-01-01T00:00:00Z[Australia/Darwin]")) ---- ==== Specify the offset for a `LocalDateTime`: ==== [source.code,clojure] ---- (t/offset-by (t/date-time "2018-01-01T00:00") 9) ---- ==== ```
48981c5d-fc97-44cd-a11e-08a18abb3055
{ "language": "AsciiDoc" }
```asciidoc = geo-shell Jared Erickson v0.7-SNAPSHOT ifndef::imagesdir[:imagesdir: images] include::intro.adoc[] include::workspace.adoc[] include::layer.adoc[] include::format.adoc[] include::raster.adoc[] include::tile.adoc[] include::style.adoc[] include::map.adoc[] include::builtin.adoc[]``` Add title image to pdf
```asciidoc = Geo Shell Jared Erickson v0.7-SNAPSHOT :title-logo-image: image:geoshell.png[pdfwidth=5.5in,align=center] ifndef::imagesdir[:imagesdir: images] include::intro.adoc[] include::workspace.adoc[] include::layer.adoc[] include::format.adoc[] include::raster.adoc[] include::tile.adoc[] include::style.adoc[] include::map.adoc[] include::builtin.adoc[]```
fc627ea2-3240-46ef-9bc2-875539341247
{ "language": "AsciiDoc" }
```asciidoc :generated: ../../../target/generated-docs/asciidoc include::{generated}/overview.adoc[] include::manual_rest_doc.adoc[] include::{generated}/paths.adoc[] ``` Add generated data type definitions to Swagger documentation
```asciidoc :generated: ../../../target/generated-docs/asciidoc include::{generated}/overview.adoc[] include::manual_rest_doc.adoc[] include::{generated}/paths.adoc[] include::{generated}/definitions.adoc[] ```
86d41eb8-e334-493f-a05b-a3cc369a6f84
{ "language": "AsciiDoc" }
```asciidoc = Spring Boot and Two DataSources This project demonstrates how to use two `DataSource` s with Spring Boot 2.1. It utilizes: * Spring Data https://github.com/spring-projects/spring-data-jpa[JPA] * https://github.com/flyway/flyway[Flyway] migrations for the two `DataSource` s * Separate Hibernate properties for each `DataSource` defined in the application.yml * Tests for components Note: It may take a few seconds for the app to start if no one has not accessed it recently``` Remove note about app starting up
```asciidoc = Spring Boot and Two DataSources This project demonstrates how to use two `DataSource` s with Spring Boot 2.1. It utilizes: * Spring Data https://github.com/spring-projects/spring-data-jpa[JPA] * https://github.com/flyway/flyway[Flyway] migrations for the two `DataSource` s * Separate Hibernate properties for each `DataSource` defined in the application.yml * Tests for components ```
f4da9495-899b-46e2-a1ff-cb31f61cbe23
{ "language": "AsciiDoc" }
```asciidoc [[development]] == Development Github repository: {datasource-proxy} === Build Documentation ```sh > ./mvnw asciidoctor:process-asciidoc@output-html ``` ``` Add how to use asciidoctor plugin in dev
```asciidoc [[development]] == Development Github repository: {datasource-proxy} === Build Documentation Generate `index.html` ```sh > ./mvnw asciidoctor:process-asciidoc@output-html ``` Http preview ```sh > ./mvnw asciidoctor:http@output-html ``````
5cd52882-485f-4478-a3d1-6e9106c63c92
{ "language": "AsciiDoc" }
```asciidoc = Infinispan Cluster Manager image:https://vertx.ci.cloudbees.com/buildStatus/icon?job=vert.x3-infinispan["Build Status",link="https://vertx.ci.cloudbees.com/view/vert.x-3/job/vert.x3-infinispan/"] This is a cluster manager implementation for Vert.x that uses http://infinispan.org[Infinispan]. Please see the in-source asciidoc documentation or the main documentation on the web-site for a full description of this component: * link:http://vertx.io/docs/vertx-infinispan/java/[web-site docs] * link:src/main/asciidoc/java/index.adoc[in-source docs] -- will remove -- ``` Revert "Revert "Revert "Test trigger on push"""
```asciidoc = Infinispan Cluster Manager image:https://vertx.ci.cloudbees.com/buildStatus/icon?job=vert.x3-infinispan["Build Status",link="https://vertx.ci.cloudbees.com/view/vert.x-3/job/vert.x3-infinispan/"] This is a cluster manager implementation for Vert.x that uses http://infinispan.org[Infinispan]. Please see the in-source asciidoc documentation or the main documentation on the web-site for a full description of this component: * link:http://vertx.io/docs/vertx-infinispan/java/[web-site docs] * link:src/main/asciidoc/java/index.adoc[in-source docs] ```
73884e02-5362-4451-810f-b0a316b829aa
{ "language": "AsciiDoc" }
```asciidoc The next transformations can be applied to any class to simplify greatly the development of Multi-Tenant applications. These include: - `@CurrentTenant` - Resolve the current tenant for the context of a class or method - `@Tenant` - Use a specific tenant for the context of a class or method - `@WithoutTenant` - Execute logic without a specific tenant (using the default connection) For example: [source,groovy] ---- import grails.gorm.multitenancy.* // resolve the current tenant for every method @CurrentTenant class TeamService { // execute the countPlayers method without a tenant id @WithoutTenant int countPlayers() { Player.count() } // use the tenant id "another" for all GORM logic within the method @Tenant({"another"}) List<Team> allTwoTeams() { Team.list() } List<Team> listTeams() { Team.list(max:10) } @Transactional void addTeam(String name) { new Team(name:name).save(flush:true) } } ----``` Replace "the next" with "the following"
```asciidoc The following transformations can be applied to any class to simplify greatly the development of Multi-Tenant applications. These include: - `@CurrentTenant` - Resolve the current tenant for the context of a class or method - `@Tenant` - Use a specific tenant for the context of a class or method - `@WithoutTenant` - Execute logic without a specific tenant (using the default connection) For example: [source,groovy] ---- import grails.gorm.multitenancy.* // resolve the current tenant for every method @CurrentTenant class TeamService { // execute the countPlayers method without a tenant id @WithoutTenant int countPlayers() { Player.count() } // use the tenant id "another" for all GORM logic within the method @Tenant({"another"}) List<Team> allTwoTeams() { Team.list() } List<Team> listTeams() { Team.list(max:10) } @Transactional void addTeam(String name) { new Team(name:name).save(flush:true) } } ---- ```
7637c3b2-1d0c-459c-9f27-61e64dcb5229
{ "language": "AsciiDoc" }
```asciidoc [[community-beats]] == Community Beats The open source community has been hard at work developing new Beats. You can check out a few of them here: [horizontal] https://github.com/Ingensi/dockerbeat[dockerbeat]:: Reads docker container statistics and indexes them in Elasticsearch https://github.com/christiangalsterer/httpbeat[httpbeat]:: Polls multiple HTTP(S) endpoints and sends the data to Logstash, Elasticsearch. Supports all HTTP methods and proxies. https://github.com/mrkschan/nginxbeat[nginxbeat]:: Reads status from Nginx https://github.com/joshuar/pingbeat[pingbeat]:: Sends ICMP pings to a list of targets and stores the round trip time (RTT) in Elasticsearch https://github.com/mrkschan/uwsgibeat[uwsgibeat]:: Reads stats from uWSGI https://github.com/kozlice/phpfpmbeat[phpfpmbeat]:: Reads status from PHP-FPM Have you created a Beat that's not listed? Open a pull request to add your link here: https://github.com/elastic/libbeat/blob/master/docs/communitybeats.asciidoc NOTE: Elastic provides no warranty or support for community-sourced Beats. [[contributing-beats]] === Contributing to Beats Remember, you can be a Beats developer, too. <<new-beat, Learn how>> ``` Add apachebeat to the list of beats from opensource
```asciidoc [[community-beats]] == Community Beats The open source community has been hard at work developing new Beats. You can check out a few of them here: [horizontal] https://github.com/Ingensi/dockerbeat[dockerbeat]:: Reads docker container statistics and indexes them in Elasticsearch https://github.com/christiangalsterer/httpbeat[httpbeat]:: Polls multiple HTTP(S) endpoints and sends the data to Logstash, Elasticsearch. Supports all HTTP methods and proxies. https://github.com/mrkschan/nginxbeat[nginxbeat]:: Reads status from Nginx https://github.com/joshuar/pingbeat[pingbeat]:: Sends ICMP pings to a list of targets and stores the round trip time (RTT) in Elasticsearch https://github.com/mrkschan/uwsgibeat[uwsgibeat]:: Reads stats from uWSGI https://github.com/kozlice/phpfpmbeat[phpfpmbeat]:: Reads status from PHP-FPM https://github.com/radoondas/apachebeat[apachebeat]:: Reads status from Apache HTTPD server-status Have you created a Beat that's not listed? Open a pull request to add your link here: https://github.com/elastic/libbeat/blob/master/docs/communitybeats.asciidoc NOTE: Elastic provides no warranty or support for community-sourced Beats. [[contributing-beats]] === Contributing to Beats Remember, you can be a Beats developer, too. <<new-beat, Learn how>> ```
0b33715e-fd78-41fc-9ac0-72862000e4db
{ "language": "AsciiDoc" }
```asciidoc = RESTful API Endpoint specification == Nodes === Idea for accessing fields directly * RUD: /nodes/:uuid/relatedProducts/:uuid -> Pageable list of nodes * R: /nodes/:uuid/name TODO: Do we want to restrict the primitiv types to read only? The user can update the node via PUT /nodes/:uuid anyway. == Webroot == Tags TODO: Define how tag familes are setup and tags are assigned to those families. == Users / Groups / Roles == Projects TODO: Define how languages are assigned to projects ``` Add section on breadcrumbs endpoint
```asciidoc = RESTful API Endpoint specification == Nodes === Idea for accessing fields directly * RUD: /nodes/:uuid/relatedProducts/:uuid -> Pageable list of nodes * R: /nodes/:uuid/name TODO: Do we want to restrict the primitiv types to read only? The user can update the node via PUT /nodes/:uuid anyway. == Breadcrumbs `/breadcrumb/:uuid` -> object containing breadcrumb to the node specified by uuid in the following format: [source,json] ---- [ { "uuid": "e0c64dsgasdgasdgdgasdgasdgasdg33", "name": "products" }, { "uuid": "e0c64ad00a9343cc864ad00a9373cc23", "name": "aeroplane.en.html" } ] ---- TODO: Where does the "name" property come from? It should be the field specified by "segmentField" in the schema. Two issues: 1. Should we normalize the property name to "name", or retain the name of the field specified by "segmentField"? 2. If the schema of a node in the breadcrumb path does not specify "segmentField", what do we do? Just display the uuid? == Webroot == Tags TODO: Define how tag familes are setup and tags are assigned to those families. == Users / Groups / Roles == Projects TODO: Define how languages are assigned to projects ```
0bba6305-13d3-48b3-9cc2-5515a221457e
{ "language": "AsciiDoc" }
```asciidoc = conoha/dokku-apps .Add pytohn-getting-started app ---- alias dokku="ssh -t dokku@conoha" cd python-getting-started dokku apps:create python-getting-started git remote add dokku dokku@conoha:python-getting-started git push dokku master ---- And this app can be available at http://python-getting-started.d.10sr.f5.si . ``` Add note to add keys for dokku
```asciidoc = conoha/dokku-apps First you have to run: ---- cat .ssh/id_rsa.pub | ssh conoha 'sudo sshcommand acl-add dokku dokkudeploy' ---- .Add pytohn-getting-started app ---- alias dokku="ssh -t dokku@conoha" cd python-getting-started dokku apps:create python-getting-started git remote add dokku dokku@conoha:python-getting-started git push dokku master ---- And this app can be available at http://python-getting-started.d.10sr.f5.si . ```
1b896351-255f-4e16-890f-8028fa55a229
{ "language": "AsciiDoc" }
```asciidoc = Continuous integration :awestruct-description: Check if the latest nightly build passes all automated tests. :awestruct-layout: normalBase :showtitle: == OptaPlanner We use Jenkins for continuous integration. *Show https://hudson.jboss.org/hudson/job/optaplanner/[the public Jenkins job].* This is a mirror of a Red Hat internal Jenkins job. Keep the build blue! == Project website (optaplanner.org) We use Travis to build this project website, see https://travis-ci.org/droolsjbpm/optaplanner-website[the travis job]. ``` Update public Jenkins job URL
```asciidoc = Continuous integration :awestruct-description: Check if the latest nightly build passes all automated tests. :awestruct-layout: normalBase :showtitle: == OptaPlanner We use Jenkins for continuous integration. *Show https://jenkins-kieci.rhcloud.com/job/optaplanner/[the public Jenkins job].* This is a mirror of a Red Hat internal Jenkins job. Keep the build green! == Project website (optaplanner.org) We use Travis to build this project website, see https://travis-ci.org/droolsjbpm/optaplanner-website[the travis job]. ```
f51f06dc-7af1-49fb-a7bc-d47bfd582a91
{ "language": "AsciiDoc" }
```asciidoc // Module included in the following assemblies: // // * dns/dns-operator.adoc [id="nw-dns-operator-logs_{context}"] = DNS Operator logs You can view DNS Operator logs by using the `oc logs` command. .Procedure View the logs of the DNS Operator: ---- $ oc logs --namespace=openshift-dns-operator deployment/dns-operator ---- ``` Fix command to get DNS logs
```asciidoc // Module included in the following assemblies: // // * dns/dns-operator.adoc [id="nw-dns-operator-logs_{context}"] = DNS Operator logs You can view DNS Operator logs by using the `oc logs` command. .Procedure View the logs of the DNS Operator: ---- $ oc logs -n openshift-dns-operator deployment/dns-operator -c dns-operator ---- ```
3978fc16-18ef-4b6b-a9fe-4108aee4ba92
{ "language": "AsciiDoc" }
```asciidoc :filename: manual/03_task_exportEA.adoc ifndef::imagesdir[:imagesdir: ../images] = exportEA IMPORTANT: Currently this feature is WINDOWS-only. https://github.com/docToolchain/docToolchain/issues/231[See related issue] include::feedback.adoc[] image::ea/Manual/exportEA.png[] TIP: Blog-Posts: https://rdmueller.github.io/jria2eac/[JIRA to Sparx EA], https://rdmueller.github.io/sparx-ea/[Did you ever wish you had better Diagrams?] == Source .build.gradle [source,groovy] ---- include::../../../scripts/exportEA.gradle[tags=exportEA] ---- .scripts/exportEAP.vbs [source] ---- include::../../../scripts/exportEAP.vbs[] ----``` Add documentation for the parameters offered by the exportEA configuration.
```asciidoc :filename: manual/03_task_exportEA.adoc ifndef::imagesdir[:imagesdir: ../images] = exportEA IMPORTANT: Currently this feature is WINDOWS-only. https://github.com/docToolchain/docToolchain/issues/231[See related issue] include::feedback.adoc[] image::ea/Manual/exportEA.png[] TIP: Blog-Posts: https://rdmueller.github.io/jria2eac/[JIRA to Sparx EA], https://rdmueller.github.io/sparx-ea/[Did you ever wish you had better Diagrams?] == Configuration By default no special configuration is necessary. But, to be more specific on the project and its packages to be used for export, two optional parameter configurations are available. The parameters can be used independently from each other. A sample how to edit your projects Config.groovy is given in the 'Config.groovy' of the docToolchain project itself. connection:: Set the connection to a certain project or comment it out to use all project files inside the src folder or its child folder. packageFilter:: Add one or multiple packageGUIDs to be used for export. All packages are analysed, if no packageFilter is set. == Source .build.gradle [source,groovy] ---- include::../../../scripts/exportEA.gradle[tags=exportEA] ---- .scripts/exportEAP.vbs [source] ---- include::../../../scripts/exportEAP.vbs[] ---- ```
7176e7c8-3c98-4f74-b08d-0a24ccb948a0
{ "language": "AsciiDoc" }
```asciidoc :doctitle: :author: Jerod Gawne :email: [email protected] :docdate: June 07, 2018 :revdate: {docdatetime} :src-uri: https://github.com/jerodg/hackerrank :difficulty: :time-complexity: :required-knowledge: :advanced-knowledge: :solution-variability: :score: :keywords: python, {required-knowledge}, {advanced-knowledge} :summary: :doctype: article :sectanchors: :sectlinks: :sectnums: :toc: {summary} == Learning == Tutorial == Improving the Template === Convention .Missing * shebang * encoding * doc-comments === Extraneous N/A === Pep8 * No new-line at end of file === Syntax N/A == Reference ``` Add Improving the Problem section.
```asciidoc :doctitle: :author: Jerod Gawne :email: [email protected] :docdate: June 07, 2018 :revdate: {docdatetime} :src-uri: https://github.com/jerodg/hackerrank :difficulty: :time-complexity: :required-knowledge: :advanced-knowledge: :solution-variability: :score: :keywords: python, {required-knowledge}, {advanced-knowledge} :summary: :doctype: article :sectanchors: :sectlinks: :sectnums: :toc: {summary} == Learning == Tutorial == Improving the Problem == Improving the Template === Convention .Missing * shebang * encoding * doc-comments === Extraneous N/A === Pep8 * No new-line at end of file === Syntax N/A == Reference ```
86c08316-e63d-4248-b363-e15bed9a8f39
{ "language": "AsciiDoc" }
```asciidoc # Camel Supervising Route Controller Example Spring Boot This example shows how to work with a simple Apache Camel application using Spring Boot and a Supervising Route Controller. ## How to run You can run this example using mvn spring-boot:run Beside JMX you can use Spring Boot Endpoints to interact with the routes: * To get info about the routes + [source] ---- curl -XGET -s http://localhost:8080/actuator/camelroutes ---- + +* To get details about a route ++ +[source] +---- +curl -XGET -s http://localhost:8080/actuator/camelroutes/{id}/detail +---- * To get info about a route + [source] ---- curl -XGET -s http://localhost:8080/actuator/camelroutes/{id}/info ---- * To stop a route + [source] ---- curl -XPOST -H "Content-Type: application/json" -s http://localhost:8080/actuator/camelroutes/{id}/stop ---- * To start a route + [source] ---- curl -XPOST -H "Content-Type: application/json" -s http://localhost:8080/actuator/camelroutes/{id}/start ---- ## More information You can find more information about Apache Camel at the website: http://camel.apache.org/ ``` Fix the example document error
```asciidoc # Camel Supervising Route Controller Example Spring Boot This example shows how to work with a simple Apache Camel application using Spring Boot and a Supervising Route Controller. ## How to run You can run this example using mvn spring-boot:run Beside JMX you can use Spring Boot Endpoints to interact with the routes: * To get info about the routes + [source] ---- curl -XGET -s http://localhost:8080/actuator/camelroutes ---- * To get details about a route + [source] ---- curl -XGET -s http://localhost:8080/actuator/camelroutes/{id}/detail ---- * To get info about a route + [source] ---- curl -XGET -s http://localhost:8080/actuator/camelroutes/{id}/info ---- * To stop a route + [source] ---- curl -XPOST -H "Content-Type: application/json" -s http://localhost:8080/actuator/camelroutes/{id}/stop ---- * To start a route + [source] ---- curl -XPOST -H "Content-Type: application/json" -s http://localhost:8080/actuator/camelroutes/{id}/start ---- ## More information You can find more information about Apache Camel at the website: http://camel.apache.org/ ```
aa726cac-dba1-4a54-a9ed-66c9956034b8
{ "language": "AsciiDoc" }
```asciidoc = The Ehcache 3.x line is currently the development line. Status of the build: image:https://ehcache.ci.cloudbees.com/buildStatus/icon?job=ehcache3 For more information, you might want to go check the https://github.com/ehcache/ehcache3/wiki[wiki]. image:http://cloudbees.prod.acquia-sites.com/sites/default/files/styles/large/public/Button-Built-on-CB-1.png?itok=3Tnkun-C``` Fix image tags in updated readme
```asciidoc = The Ehcache 3.x line is currently the development line. Status of the build: image:https://ehcache.ci.cloudbees.com/buildStatus/icon?job=ehcache3[Ehcache@Cloudbees, link="https://ehcache.ci.cloudbees.com/job/ehcache3/"] For more information, you might want to go check the https://github.com/ehcache/ehcache3/wiki[wiki]. image:http://cloudbees.prod.acquia-sites.com/sites/default/files/styles/large/public/Button-Powered-by-CB.png?itok=uMDWINfY[Cloudbees, link="http://www.cloudbees.com/resources/foss"]```
737a0873-7656-4bba-9751-276e7188db49
{ "language": "AsciiDoc" }
```asciidoc = clublist - Club Membership List Track members for a small non-profit club. This shows off some basic functionality of JPA and DeltaSpike Data in a JSF environment. == Deployment . Copy config-sample.properties to config.properties, change the name in the orgName property in this file from 'Sample Club' to your organization's name (keep it short). . Setup the datasource in your app server . Setup the deployment as needed (e.g., edit jboss-deployment.xml) . Create a user in the role of club_exec in your app server. . Deploy (e.g., mvn wildfly:deploy if you use JBoss WildFly). . Enjoy! == ToDo Security: Maybe allow members to update their own record (only!) Use redirect after editing to really go back to the List page. "Position" should be a relationship to another Entity, with a dropdown chooser. "Membership Type" should be a relationship to another Entity, with a dropdown. Search with 'Like' method in DS Data A confirmation (p:dialog?) on the Edit->Delete button would be a good idea. Implement the Mailing List page. Implement the Print Member Badge/Label page. Even though people should not use spreadsheets for database work, you will probably be pressured to impelement the "Export" capability. You will need Apache POI for this. Refactor Home object to merge w/ darwinsys-ee EntityHome ``` Format ToDo as a list
```asciidoc = clublist - Club Membership List Track members for a small non-profit club. This shows off some basic functionality of JPA and DeltaSpike Data in a JSF environment. == Deployment . Copy config-sample.properties to config.properties, change the name in the orgName property in this file from 'Sample Club' to your organization's name (keep it short). . Setup the datasource in your app server . Setup the deployment as needed (e.g., edit jboss-deployment.xml) . Create a user in the role of club_exec in your app server. . Deploy (e.g., mvn wildfly:deploy if you use JBoss WildFly). . Enjoy! == ToDo Here are some things that should be added. https://github.com/IanDarwin/clublist[Fork this project on GitHub] and send pull requests when you get one working! . Search with 'Like' method in DS Data . Use redirect after editing to really go back to the List page. . Maybe allow members to update their own record (only!) Probably requires moving to app-managed security since you already have a record for each person. . "Position" should be a relationship to another Entity, with a dropdown chooser. . "Membership Type" should be a relationship to another Entity, with a dropdown. . A confirmation (p:dialog?) on the Edit->Delete button would be a good idea. . Implement the Mailing List page. . Implement the Print Member Badge/Label page. . Even though people should not use spreadsheets for database work, you will probably be pressured to impelement the "Export" capability. You will need Apache POI for this. . Refactor Home object to merge w/ darwinsys-ee EntityHome ```
25b7f1b1-303d-4e9e-af7f-2f070ac0de94
{ "language": "AsciiDoc" }
```asciidoc proxy ===== [quote] A development proxy with logging and redirect-rewriting Installation ------------ [source,bash] ---- go get -u github.com/ciarand/proxy ---- Usage ----- [source,bash] ---- # start the proxy in one shell: proxy -from=https://www.google.com -to=http://0.0.0.0:8080 # and in another, run curl: curl -s http://localhost:8080/ | head -c 15 <!doctype html> # result from proxy shell (shortened for width): INFO[0022] request started client_address=[::1]:58988 method=GET uri=/ INFO[0023] request complete elapsed=624.644152ms status=200 ---- License ------- See the link:LICENSE[LICENSE] file. ``` Add prebuilt binary installation instructions
```asciidoc proxy ===== [quote] A development proxy with logging and redirect-rewriting Installation ------------ Download a prebuilt binary for your platform and architecture from the link:https://github.com/ciarand/proxy/releases[release page]. Or, build from source: [source,bash] ---- # from source go get -u github.com/ciarand/proxy ---- Usage ----- [source,bash] ---- # start the proxy in one shell: proxy -from=https://www.google.com -to=http://0.0.0.0:8080 # and in another, run curl: curl -s http://localhost:8080/ | head -c 15 <!doctype html> # result from proxy shell (shortened for width): INFO[0022] request started client_address=[::1]:58988 method=GET uri=/ INFO[0023] request complete elapsed=624.644152ms status=200 ---- License ------- See the link:LICENSE[LICENSE] file. ```
511501e6-7e5a-431a-86d2-6d41a0028aa0
{ "language": "AsciiDoc" }
```asciidoc # android-images Yet another repo with docker images for Android developers [source,planzuml] ------ node "java jdk-8" as jdk8 node "java jdk-7" as jdk7 artifact "Android" { node "gradle" as gradle node "sdk" as sdk node "ndk 11" as ndk11 node "ndk 13" as ndk13 node "vlc" as vlc } artifact "Tools" { node "ruby" as ruby node "Asciidoctor" as asciidoctor } gradle -up-> jdk8 sdk -up-> gradle ndk11 -up-> sdk ndk13 -up-> sdk vlc -up-> ndk11 ruby -up-> jdk8 asciidoctor -up-> ruby ------``` Fix typo in plantuml definition
```asciidoc # android-images Yet another repo with docker images for Android developers [source,plantuml] ------ node "java jdk-8" as jdk8 node "java jdk-7" as jdk7 artifact "Android" { node "gradle" as gradle node "sdk" as sdk node "ndk 11" as ndk11 node "ndk 13" as ndk13 node "vlc" as vlc } artifact "Tools" { node "ruby" as ruby node "Asciidoctor" as asciidoctor } gradle -up-> jdk8 sdk -up-> gradle ndk11 -up-> sdk ndk13 -up-> sdk vlc -up-> ndk11 ruby -up-> jdk8 asciidoctor -up-> ruby ------ ```
e949f11d-4e23-4b73-86ce-a706943568fb
{ "language": "AsciiDoc" }
```asciidoc = Blueprint :author: Hafid Haddouti image:https://travis-ci.org/haf-tech/blueprint.svg?branch=master["Build Status", link="https://travis-ci.org/haf-tech/blueprint"] image:https://img.shields.io/badge/License-Apache%202.0-blue.svg["License", link="https://opensource.org/licenses/Apache-2.0"] .... Blueprint is a playground for illustrating different paradigms. In the meantime the following concepts are integrated or planned: - Spring Boot - AsciiDoctor integration, with UML and different outputs - Onion architecture - Docker build/push Next: - reactive programming ``` Add link to GitHup page
```asciidoc = Blueprint :author: Hafid Haddouti image:https://travis-ci.org/haf-tech/blueprint.svg?branch=master["Build Status", link="https://travis-ci.org/haf-tech/blueprint"] image:https://img.shields.io/badge/License-Apache%202.0-blue.svg["License", link="https://opensource.org/licenses/Apache-2.0"] Blueprint is a playground for illustrating different paradigms. In the meantime the following concepts are integrated or planned: - Spring Boot - AsciiDoctor integration, with UML and different outputs - Onion architecture - Docker build/push Next: - reactive programming An up to date documentation is https://haf-tech.github.io/blueprint/[online] available. ```
19056f95-d0d1-4cc3-b524-03564fd56e77
{ "language": "AsciiDoc" }
```asciidoc = Who's Using Debezium? :awestruct-layout: doc :linkattrs: :icons: font :source-highlighter: highlight.js Debezium is used in production by a wide range of companies and organizations. This list contains users of Debezium who agreed to serve as public reference; where available, further resources with more details are linked. If your organization would like to be added to (or removed from) this list, please send a pull request for updating the https://github.com/debezium/debezium.github.io/blob/develop/docs/users.asciidoc[source of this page]. * Convoy (https://medium.com/convoy-tech/logs-offsets-near-real-time-elt-with-apache-kafka-snowflake-473da1e4d776[details]) * JW Player (https://www.slideshare.net/jwplayer/polylog-a-logbased-architecture-for-distributed-systems-124997666[details]) * OYO * Usabilla by Surveymonkey * WePay, Inc. (https://wecode.wepay.com/posts/streaming-databases-in-realtime-with-mysql-debezium-kafka[details], https://wecode.wepay.com/posts/streaming-cassandra-at-wepay-part-1[more details]) * ... and you? Then let us know and get added to the list, too. Thanks! ``` Fix broken link to source of page
```asciidoc = Who's Using Debezium? :awestruct-layout: doc :linkattrs: :icons: font :source-highlighter: highlight.js Debezium is used in production by a wide range of companies and organizations. This list contains users of Debezium who agreed to serve as public reference; where available, further resources with more details are linked. If your organization would like to be added to (or removed from) this list, please send a pull request for updating the https://github.com/debezium/debezium.github.io/blob/develop/community/users.asciidoc[source of this page]. * Convoy (https://medium.com/convoy-tech/logs-offsets-near-real-time-elt-with-apache-kafka-snowflake-473da1e4d776[details]) * JW Player (https://www.slideshare.net/jwplayer/polylog-a-logbased-architecture-for-distributed-systems-124997666[details]) * OYO * Usabilla by Surveymonkey * WePay, Inc. (https://wecode.wepay.com/posts/streaming-databases-in-realtime-with-mysql-debezium-kafka[details], https://wecode.wepay.com/posts/streaming-cassandra-at-wepay-part-1[more details]) * ... and you? Then let us know and get added to the list, too. Thanks! ```
3bb08d27-dd13-48dc-b670-267f3e361895
{ "language": "AsciiDoc" }
```asciidoc = Sample Book Author Name <[email protected]> v1.0, October 4, 2015: First Draft :doctype: book :docinfo: :toc: left :toclevels: 2 :sectnums: :linkcss: An sample book to show case AsciiDoctor folder structure. include::book/chapter-1/chapter-1.adoc[leveloffset=+1] include::book/chapter-2/chapter-2.adoc[leveloffset=+1] ``` Add extra line between include.
```asciidoc = Sample Book Author Name <[email protected]> v1.0, October 4, 2015: First Draft :doctype: book :docinfo: :toc: left :toclevels: 2 :sectnums: :linkcss: An sample book to show case AsciiDoctor folder structure. include::book/chapter-1/chapter-1.adoc[leveloffset=+1] include::book/chapter-2/chapter-2.adoc[leveloffset=+1] ```
a87bace7-3de7-475d-8ba5-e8699da3dbab
{ "language": "AsciiDoc" }
```asciidoc = bitcoinj-addons Release Process == Main Release Process . Update `CHANGELOG.adoc` . Set versions .. `README.adoc` .. bitcoinj-groovy `ExtensionModule` .. `gradle.properties` . Commit version bump and changelog. . Tag: `git tag -a v0.x.y -m "Release 0.x.y"` . Push: `git push --tags origin master` . Full build, test .. `./gradlew clean jenkinsBuild regTest` .. Recommended: test with *OmniJ* regTests. . Publish to Bintray: .. `./gradlew bintrayUpload` .. Confirm publish of artifacts in Bintray Web UI. . Update github-pages site (including JavaDoc): `./gradlew publishSite` == Announcements . Not yet. == After release . Set versions back to -SNAPSHOT .. `gradle.properties` .. bitcoinj-groovy `ExtensionModule` .. *Not* `README.adoc` -- it should match release version . Commit and push to master ``` Fix minor error in release process doc.
```asciidoc = bitcoinj-addons Release Process == Main Release Process . Update `CHANGELOG.adoc` . Set versions .. `README.adoc` .. bitcoinj-groovy `ExtensionModule` .. `build.gradle` (should move to `gradle.properties`) . Commit version bump and changelog. . Tag: `git tag -a v0.x.y -m "Release 0.x.y"` . Push: `git push --tags origin master` . Full build, test .. `./gradlew clean jenkinsBuild regTest` .. Recommended: test with *OmniJ* regTests. . Publish to Bintray: .. `./gradlew bintrayUpload` .. Confirm publish of artifacts in Bintray Web UI. . Update github-pages site (including JavaDoc): `./gradlew publishSite` == Announcements . Not yet. == After release . Set versions back to -SNAPSHOT .. `gradle.properties` .. bitcoinj-groovy `ExtensionModule` .. *Not* `README.adoc` -- it should match release version . Commit and push to master ```
d91d3d9b-c0ab-4cb3-936b-1ece2e5b8895
{ "language": "AsciiDoc" }
```asciidoc |=== |image:http://goreportcard.com/badge/spohnan/ci-bot-01["Go Report Card",link="http://goreportcard.com/report/spohnan/ci-bot-01", window="_blank"]|image:https://travis-ci.org/spohnan/ci-bot-01.svg?branch=master["Build Status", link="https://travis-ci.org/spohnan/ci-bot-01", window="_blank"] |=== === Automated GitHub Interactions ``` Add travis badge to readme
```asciidoc [options="header"] |=== |CI Build and Tests|Static Analysis |image:https://travis-ci.org/spohnan/ci-bot-01.svg?branch=master["Build Status", link="https://travis-ci.org/spohnan/ci-bot-01", window="_blank"]|image:http://goreportcard.com/badge/spohnan/ci-bot-01["Go Report Card",link="http://goreportcard.com/report/spohnan/ci-bot-01", window="_blank"] |=== === Automated GitHub Interactions ```
d81095b1-529b-44e7-a8bc-6af2b062e3df
{ "language": "AsciiDoc" }
```asciidoc [[SpringCloud-SpringCloud]] Spring Cloud ~~~~~~~~~~~ *Available as of Camel 2.19* Spring Cloud component Maven users will need to add the following dependency to their `pom.xml` in order to use this component: [source,xml] ------------------------------------------------------------------------------------------------ <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-spring-cloud</artifactId> <version>${camel.version}</version> <!-- use the same version as your Camel core version --> </dependency> ------------------------------------------------------------------------------------------------ `camel-spring-cloud` jar comes with the `spring.factories` file, so as soon as you add that dependency into your classpath, Spring Boot will automatically auto-configure Camel for you. [[SpringCloud-CamelSpringCloudStarter]] Camel Spring Cloud Starter ^^^^^^^^^^^^^^^^^^^^^^^^^ *Available as of Camel 2.19* To use the starter, add the following to your spring boot pom.xml file: [source,xml] ------------------------------------------------------ <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-spring-cloud-starter</artifactId> <version>${camel.version}</version> <!-- use the same version as your Camel core version --> </dependency> ------------------------------------------------------ ``` Fix copy and paste doc
```asciidoc === Spring Cloud Netflix *Available as of Camel 2.19* The Spring Cloud Netflix component bridges Camel Cloud and Spring Cloud Netflix so you can leverage Spring Cloud Netflix service discovery and load balance features in Camel and/or you can use Camel Service Discovery implementations as ServerList source for Spring Cloud Netflix's Ribbon load balabncer. Maven users will need to add the following dependency to their `pom.xml` in order to use this component: [source,xml] ---- <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-spring-cloud-netflix</artifactId> <version>${camel.version}</version> <!-- use the same version as your Camel core version --> </dependency> ---- `camel-spring-cloud-netflix` jar comes with the `spring.factories` file, so as soon as you add that dependency into your classpath, Spring Boot will automatically auto-configure Camel for you. You can disable Camel Spring Cloud Netflix with the following properties: [source,properties] ---- # Enable/Disable the whole integration, default true camel.cloud.netflix = true # Enable/Disable the integration with Ribbon, default true camel.cloud.netflix.ribbon = true ---- === Spring Cloud Netflix Starter *Available as of Camel 2.19* To use the starter, add the following to your spring boot pom.xml file: [source,xml] ---- <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-spring-cloud-netflix-starter</artifactId> <version>${camel.version}</version> <!-- use the same version as your Camel core version --> </dependency> ---- ```
553fa6e2-38b2-44cc-90b6-a57aef4adf96
{ "language": "AsciiDoc" }
```asciidoc = Filebeat :libbeat: http://www.elastic.co/guide/en/beats/libbeat/1.0.0-rc1 :version: 1.0.0-rc1 include::./overview.asciidoc[] include::./getting-started.asciidoc[] include::./fields.asciidoc[] include::./configuration.asciidoc[] include::./command-line.asciidoc[] include::./migration.asciidoc[] include::./support.asciidoc[] ``` Use master version in docs
```asciidoc = Filebeat :libbeat: http://www.elastic.co/guide/en/beats/libbeat/master :version: master include::./overview.asciidoc[] include::./getting-started.asciidoc[] include::./fields.asciidoc[] include::./configuration.asciidoc[] include::./command-line.asciidoc[] include::./migration.asciidoc[] include::./support.asciidoc[] ```
4ce846b4-62ef-49ab-a98e-5069c1c6c490
{ "language": "AsciiDoc" }
```asciidoc [float] [[breaking_70_scripting_changes]] === Scripting changes [float] ==== getDate() and getDates() removed Fields of type `long` and `date` had `getDate()` and `getDates()` methods (for multi valued fields) to get an object with date specific helper methods for the current doc value. In 5.3.0, `date` fields were changed to expose this same date object directly when calling `doc["myfield"].value`, and the getter methods for date objects were deprecated. These methods have now been removed. Instead, use `.value` on `date` fields, or explicitly parse `long` fields into a date object using `Instance.ofEpochMillis(doc["myfield"].value)`. [float] ==== Script errors will return as `400` error codes Malformed scripts, either in search templates, ingest pipelines or search requests, return `400 - Bad request` while they would previously return `500 - Internal Server Error`. This also applies for stored scripts. ``` Add migration info for missing values in script
```asciidoc [float] [[breaking_70_scripting_changes]] === Scripting changes [float] ==== getDate() and getDates() removed Fields of type `long` and `date` had `getDate()` and `getDates()` methods (for multi valued fields) to get an object with date specific helper methods for the current doc value. In 5.3.0, `date` fields were changed to expose this same date object directly when calling `doc["myfield"].value`, and the getter methods for date objects were deprecated. These methods have now been removed. Instead, use `.value` on `date` fields, or explicitly parse `long` fields into a date object using `Instance.ofEpochMillis(doc["myfield"].value)`. [float] ==== Accessing missing document values will throw an error `doc['field'].value` will throw an exception if the document is missing a value for the field `field`. To check if a document is missing a value, you can use `doc['field'].size() == 0`. [float] ==== Script errors will return as `400` error codes Malformed scripts, either in search templates, ingest pipelines or search requests, return `400 - Bad request` while they would previously return `500 - Internal Server Error`. This also applies for stored scripts. ```
e331c907-956f-4fd4-a41d-9f0d3379915e
{ "language": "AsciiDoc" }
```asciidoc = Maven Goals This plugin supports the following goals which are explained in detail in the next sections. .Plugin Goals [cols="1,3"] |=== |Goal | Description |**<<{plugin}:build>>** |Build images |**<<{plugin}:start>>** or **<<{plugin}:start,{plugin}:run>>** |Create and start containers |**<<{plugin}:stop>>** |Stop and destroy containers |**<<{plugin}:push>>** |Push images to a registry |**<<{plugin}:watch>>** |Watch for doing rebuilds and restarts |**<<{plugin}:remove>>** |Remove images from local docker host |**<<{plugin}:logs>>** |Show container logs |**<<{plugin}:source>>** |Attach docker build archive to Maven project |**<<{plugin}:save>>** |Save images to a file |**<<{plugin}:volume-create>>** |Create a volume for containers to share data |**<<{plugin}:volume-remove>>** |Remove a volume |=== Note that all goals are orthogonal to each other. For example in order to start a container for your application you typically have to build its image before. `{plugin}:start` does *not* imply building the image so you should use it then in combination with `{plugin}:build`. ``` Add default Maven lifecycle bindings to the manual
```asciidoc = Maven Goals This plugin supports the following goals which are explained in detail in the next sections. .Plugin Goals [cols="1,1,2"] |=== |Goal | Default Lifecycle Phase | Description |**<<{plugin}:build>>** |install |Build images |**<<{plugin}:start>>** or **<<{plugin}:start,{plugin}:run>>** |pre-integration-test |Create and start containers |**<<{plugin}:stop>>** |post-integration-test |Stop and destroy containers |**<<{plugin}:push>>** |deploy |Push images to a registry |**<<{plugin}:watch>>** | |Watch for doing rebuilds and restarts |**<<{plugin}:remove>>** |post-integration-test |Remove images from local docker host |**<<{plugin}:logs>>** | |Show container logs |**<<{plugin}:source>>** | |Attach docker build archive to Maven project |**<<{plugin}:save>>** | |Save images to a file |**<<{plugin}:volume-create>>** |pre-integration-test |Create a volume for containers to share data |**<<{plugin}:volume-remove>>** |post-integration-test |Remove a volume |=== Note that all goals are orthogonal to each other. For example in order to start a container for your application you typically have to build its image before. `{plugin}:start` does *not* imply building the image so you should use it then in combination with `{plugin}:build`. ```
0119b2d0-a6c8-4d71-972e-d4ea83852974
{ "language": "AsciiDoc" }
```asciidoc # Hacky Implicit Provisioning version 2 This document describes a quick hack to test the implicit provisioning flow as it currently works in aktualizr before full support is available in meta-updater and on the server. ## Goals * end-to-end installation of updates using OSTree ## Steps 1. Edit `recipes-sota/aktualizr/files/sota_autoprov.toml` in meta-updater: * Remove the `provision_path` line. * Add a line in the `[tls]` section with `server = "https://..."`. Use the URL from your account's credentials.zip file. 1. Edit `recipes-sota/aktualizr/aktualizr_git.bb` in meta-updater: * Change aktualizr `SRC_URI` to `git://github.com/advancedtelematic/aktualizr;branch=bugfix/cert_prov_install`. * Change `SRCREV` to `13828774baa5a7369e6f5ca552b879ad1ce773d5`. * Increment `PR`. 1. Build a standard image using bitbake. Make sure to set `SOTA_PACKED_CREDENTIALS` like normal. 1. Boot the image. 1. Optionally, verify that aktualizr is not provisioning. Make sure the device is not visible in the Garage. 1. Run `cert_provider` from the aktualizr repo: `cert_provider -c credentials.zip -t <device>` 1. Verify that aktualizr provisions correctly with the server using the device_id generated by `cert_provider`. ``` Use latest version of aktualizr. Add known issue.
```asciidoc # Hacky Implicit Provisioning version 2 This document describes a quick hack to test the implicit provisioning flow as it currently works in aktualizr before full support is available in meta-updater and on the server. ## Goals * end-to-end installation of updates using OSTree ## Steps 1. Edit `recipes-sota/aktualizr/files/sota_autoprov.toml` in meta-updater: * Remove the `provision_path` line. * Add a line in the `[tls]` section with `server = "https://..."`. Use the URL from your account's credentials.zip file. 1. Edit `recipes-sota/aktualizr/aktualizr_git.bb` in meta-updater: * Change `SRCREV` to `1c635c67d70cf38d2c841d449e750f08855e41a4`. * Increment `PR`. 1. Build a standard image using bitbake. Make sure to set `SOTA_PACKED_CREDENTIALS` like normal. 1. Boot the image. 1. Optionally, verify that aktualizr is not provisioning. Make sure the device is not visible in the Garage. 1. Run `cert_provider` from the aktualizr repo: `cert_provider -c credentials.zip -t <device>` 1. Verify that aktualizr provisions correctly with the server using the device_id generated by `cert_provider`. ## Known Issues At this time, although the device will be shown in the Garage, its installed package will not be. ```
ce196bd9-c2d0-4442-8be0-b653c5b59cd3
{ "language": "AsciiDoc" }
```asciidoc = Videos James Elliott <[email protected]> :icons: font // Set up support for relative links on GitHub; add more conditions // if you need to support other environments and extensions. ifdef::env-github[:outfilesuffix: .adoc] This page collects performance videos that highlight Afterglow in action. If you have any to share, please post them to the https://gitter.im/brunchboy/afterglow[Afterglow room on Gitter]! https://vimeo.com/153492480[image:assets/Deepower-2015.png[Deepower Live Report,align="right",float="right"]] Since I have been too busy working on the software to get out and perform with it, I am deeply grateful to https://github.com/dandaka[Vlad Rafeev] for getting this page started by sharing a video of a https://www.facebook.com/deepowerband/[Deepower audiovisual] show he lit using Afterglow in December, 2015, in Kaliningrad, Russia last month. He modestly says, “My first experience with lights, still a lot to accomplish.” And I say, there is a ton left to implement in the software too. But it is already fun to see https://vimeo.com/153492480[great video like this]! ``` Work around Github image float issue.
```asciidoc = Videos James Elliott <[email protected]> :icons: font // Set up support for relative links on GitHub; add more conditions // if you need to support other environments and extensions. ifdef::env-github[:outfilesuffix: .adoc] This page collects performance videos that highlight Afterglow in action. If you have any to share, please post them to the https://gitter.im/brunchboy/afterglow[Afterglow room on Gitter]! +++<a href="https://vimeo.com/153492480"><img src="assets/Deepower-2015.png" align="right" alt="Deepower Live Report"></a>+++ Since I have been too busy working on the software to get out and perform with it, I am deeply grateful to https://github.com/dandaka[Vlad Rafeev] for getting this page started by sharing a video of a https://www.facebook.com/deepowerband/[Deepower audiovisual] show he lit using Afterglow in December, 2015, in Kaliningrad, Russia last month. He modestly says, “My first experience with lights, still a lot to accomplish.” And I say, there is a ton left to implement in the software too. But it is already fun to see https://vimeo.com/153492480[great video like this]! ```
c5882384-b0e4-44ea-ae3e-48506da2593e
{ "language": "AsciiDoc" }
```asciidoc [id="about-migration"] = About migrating {product-title} 3 to 4 include::modules/common-attributes.adoc[] :context: about-migration toc::[] {product-title} 4 includes new technologies and functionality that results in a cluster that is self-managing, flexible, and automated. The way that {product-title} 4 clusters are deployed and managed drastically differs from {product-title} 3. To successfully transition from {product-title} 3 to {product-title} 4, it is important that you review the following information: xref:../../migration/migrating_3_4/planning-migration-3-to-4.adoc#planning-migration-3-to-4[Planning your transition]:: Learn about the differences between {product-title} versions 3 and 4. Prior to transitioning, be sure that you have reviewed and prepared for storage, networking, logging, security, and monitoring considerations. xref:../../migration/migrating_3_4/migrating-application-workloads-3-4.adoc#migrating-application-workloads-3-4[Performing your migration]:: Learn about and use the tools to perform your migration: * {mtc-full} ({mtc-short}) to migrate your application workloads ``` Update tools sentence after removing CPMA
```asciidoc [id="about-migration"] = About migrating {product-title} 3 to 4 include::modules/common-attributes.adoc[] :context: about-migration toc::[] {product-title} 4 includes new technologies and functionality that results in a cluster that is self-managing, flexible, and automated. The way that {product-title} 4 clusters are deployed and managed drastically differs from {product-title} 3. To successfully transition from {product-title} 3 to {product-title} 4, it is important that you review the following information: xref:../../migration/migrating_3_4/planning-migration-3-to-4.adoc#planning-migration-3-to-4[Planning your transition]:: Learn about the differences between {product-title} versions 3 and 4. Prior to transitioning, be sure that you have reviewed and prepared for storage, networking, logging, security, and monitoring considerations. xref:../../migration/migrating_3_4/migrating-application-workloads-3-4.adoc#migrating-application-workloads-3-4[Performing your migration]:: Learn about and use the {mtc-full} ({mtc-short}) to migrate your application workloads. ```
a525b09c-a9fa-4760-bcfb-16d49c793c51
{ "language": "AsciiDoc" }
```asciidoc = Jira Release Notes Generator (JiraRnGen) :Author: David Thompson :Email: <[email protected]> :Revision: 0.1.0 2016-08-03 == Description I was looking for a way with hosted Jira to be able to send out release notes with our own template. Since the hosted version doesn't allow you to edit the templates, I decided to build an application that worked with the Jira api to build an html page that could be emailed to our team. This NodeJS app is the result of this work. == ReleaseNotes 0.1.0 - First iteration at being able to send Jira Release Notes == Usage First time to use, run within the JiraRnGen directory. ---- $ npm install ---- Then to run each time ---- $ node release-notes.js -u username -p password --email -n jira-hostname -f release-name -s email-address ---- ``` Update docs for running at start.
```asciidoc = Jira Release Notes Generator (JiraRnGen) :Author: David Thompson :Email: <[email protected]> :Revision: 0.1.0 2016-08-03 == Description I was looking for a way with hosted Jira to be able to send out release notes with our own template. Since the hosted version doesn't allow you to edit the templates, I decided to build an application that worked with the Jira api to build an html page that could be emailed to our team. This NodeJS app is the result of this work. == ReleaseNotes 0.1.0 - First iteration at being able to send Jira Release Notes == Usage First time to use, run within the JiraRnGen directory. ---- $ npm install ---- You will need to create a image/logo.json file; you can copy the example to logo.json for testing. You will need to set up a sendgrid account and set up an API key to use with sendgrid. Copy conf/config-example.json to conf/config.json. Then to run each time ---- $ node release-notes.js -u username -p password --email -n jira-hostname -f release-name -s email-address ---- ```
f71d9c50-834c-4bd9-b3b7-01eabc9d2d5f
{ "language": "AsciiDoc" }
```asciidoc // Global settings :ascii-ids: :encoding: UTF-8 :lang: en :icons: font :toc: left :toclevels: 3 :numbered: [[gu]] = Users Guide :author: Copyright (c) 2014-2016 The OpenNMS Group, Inc. :revnumber: {opennms-product-name} {opennms-version} :revdate: {last-update-label} {docdatetime} :version-label!: [[gu-service-assurance]] == Service Assurance include::text/service-assurance/introduction.adoc[] include::text/service-assurance/critical-service.adoc[] include::text/service-assurance/path-outage.adoc[] include::text/surveillance-view.adoc[] include::text/dashboard.adoc[] [[gu-bsm]] == Busines Service Monitoring include::text/bsm/introduction.adoc[] include::text/bsm/business-service-hierarchy.adoc[] include::text/bsm/operational-status.adoc[] include::text/bsm/root-cause-impact-analysis.adoc[] include::text/bsm/simulation-mode.adoc[] include::text/bsm/share-bsm-view.adoc[] include::text/bsm/change-icons.adoc[] [[gu-alarms]] == Alarms include::text/alarms/alarm-notes.adoc[] ``` Fix typo in Busines -> Business
```asciidoc // Global settings :ascii-ids: :encoding: UTF-8 :lang: en :icons: font :toc: left :toclevels: 3 :numbered: [[gu]] = Users Guide :author: Copyright (c) 2014-2016 The OpenNMS Group, Inc. :revnumber: {opennms-product-name} {opennms-version} :revdate: {last-update-label} {docdatetime} :version-label!: [[gu-service-assurance]] == Service Assurance include::text/service-assurance/introduction.adoc[] include::text/service-assurance/critical-service.adoc[] include::text/service-assurance/path-outage.adoc[] include::text/surveillance-view.adoc[] include::text/dashboard.adoc[] [[gu-bsm]] == Business Service Monitoring include::text/bsm/introduction.adoc[] include::text/bsm/business-service-hierarchy.adoc[] include::text/bsm/operational-status.adoc[] include::text/bsm/root-cause-impact-analysis.adoc[] include::text/bsm/simulation-mode.adoc[] include::text/bsm/share-bsm-view.adoc[] include::text/bsm/change-icons.adoc[] [[gu-alarms]] == Alarms include::text/alarms/alarm-notes.adoc[] ```
d2f2c632-d042-4819-bda5-f1280c2e824a
{ "language": "AsciiDoc" }
```asciidoc image:https://jenkins-kieci.rhcloud.com/buildStatus/icon?job=optaplanner["Build Status", link="https://jenkins-kieci.rhcloud.com/job/optaplanner"] == Developing Drools, OptaPlanner and jBPM *If you want to build or contribute to a droolsjbpm project, https://github.com/droolsjbpm/droolsjbpm-build-bootstrap/blob/master/README.md[read this document].* *It will save you and us a lot of time by setting up your development environment correctly.* It solves all known pitfalls that can disrupt your development. It also describes all guidelines, tips and tricks. If you want your pull requests (or patches) to be merged into master, please respect those guidelines. ``` Add build instructions to readme
```asciidoc image:https://jenkins-kieci.rhcloud.com/buildStatus/icon?job=optaplanner["Build Status", link="https://jenkins-kieci.rhcloud.com/job/optaplanner"] == Quick development start To build and run from source: [source,sh] ---- $ mvn clean install $ cd optaplanner-examples $ mvn exec:java ---- To develop with IntelliJ IDEA, Eclipse or NetBeans, open the root `pom.xml` as a new project and configure a _Run/Debug configuration_ like this: * Main class: `org.optaplanner.examples.app.OptaPlannerExamplesApp` * VM options: `-Xmx2G -server` (Memory only needed when using the big datasets in the examples) * Program arguments: `` * Working directory: `$MODULE_DIR$` (must resolve to optaplanner-examples directory) * Use classpath of module: `optaplanner-examples` == Developing Drools, OptaPlanner and jBPM *If you want to build or contribute to a droolsjbpm project, https://github.com/droolsjbpm/droolsjbpm-build-bootstrap/blob/master/README.md[read this document].* *It will save you and us a lot of time by setting up your development environment correctly.* It solves all known pitfalls that can disrupt your development. It also describes all guidelines, tips and tricks. If you want your pull requests (or patches) to be merged into master, please respect those guidelines. ```
72465e4c-7c43-4917-839a-107a1be8a3df
{ "language": "AsciiDoc" }
```asciidoc = Explorer for Hawkular http://hawkular.org/[Hawkular] is a set of components for Monitoring. This explorer connects to a Hawkular server and allows to browse trough inventory and view entities, graph metrics. ifndef::env-github[] image::docs/screenshot.png[] endif::[] ifdef::env-github[] image::https://github.com/pilhuhn/hawkfx/blob/master/docs/screenshot.png[] endif::[] == Running The explorer requires JRuby in version 9+ If you are using `rvm` you can select it via `rvm use jruby-9.0.5.0` then use `bundler` to install the required gems `bundle install` then run `jruby hawkfx.rb```` Make build step more clear.
```asciidoc = Explorer for Hawkular http://hawkular.org/[Hawkular] is a set of components for Monitoring. This explorer connects to a Hawkular server and allows to browse trough inventory and view entities, graph metrics. ifndef::env-github[] image::docs/screenshot.png[] endif::[] ifdef::env-github[] image::https://github.com/pilhuhn/hawkfx/blob/master/docs/screenshot.png[] endif::[] == Running The explorer requires JRuby in version 9+ If you are using `rvm` you can select it via `rvm use jruby-9.0.5.0` install and use `bundler` to install the required gems `gem install bundler` `bundle install` then run `jruby hawkfx.rb````
4bf15c83-9492-4fd0-b6f6-22154ded2bf5
{ "language": "AsciiDoc" }
```asciidoc [[mapping-id-field]] === `_id` field Each document indexed is associated with a <<mapping-type-field,`_type`>> (see <<mapping-type>>) and an <<mapping-id-field,`_id`>>. The `_id` field is not indexed as its value can be derived automatically from the <<mapping-uid-field,`_uid`>> field. The value of the `_id` field is accessible in certain queries (`term`, `terms`, `match`, `query_string`, `simple_query_string`) and scripts, but _not_ in aggregations or when sorting, where the <<mapping-uid-field,`_uid`>> field should be used instead: [source,js] -------------------------- # Example documents PUT my_index/my_type/1 { "text": "Document with ID 1" } PUT my_index/my_type/2 { "text": "Document with ID 2" } GET my_index/_search { "query": { "terms": { "_id": [ "1", "2" ] <1> } }, "script_fields": { "UID": { "script": "doc['_id']" <2> } } } -------------------------- // AUTOSENSE <1> Querying on the `_id` field (also see the <<query-dsl-ids-query,`ids` query>>) <2> Accessing the `_id` field in scripts (inline scripts must be <<enable-dynamic-scripting,enabled>> for this example to work) ``` Fix docs example for the _id field, the field is not accessible in scripts
```asciidoc [[mapping-id-field]] === `_id` field Each document indexed is associated with a <<mapping-type-field,`_type`>> (see <<mapping-type>>) and an <<mapping-id-field,`_id`>>. The `_id` field is not indexed as its value can be derived automatically from the <<mapping-uid-field,`_uid`>> field. The value of the `_id` field is accessible in certain queries (`term`, `terms`, `match`, `query_string`, `simple_query_string`), but _not_ in aggregations, scripts or when sorting, where the <<mapping-uid-field,`_uid`>> field should be used instead: [source,js] -------------------------- # Example documents PUT my_index/my_type/1 { "text": "Document with ID 1" } PUT my_index/my_type/2 { "text": "Document with ID 2" } GET my_index/_search { "query": { "terms": { "_id": [ "1", "2" ] <1> } } } -------------------------- // AUTOSENSE <1> Querying on the `_id` field (also see the <<query-dsl-ids-query,`ids` query>>) ```
70cfcbaf-37da-474a-a2b4-92715d4e81f1
{ "language": "AsciiDoc" }
```asciidoc [[plugins_list]] == List of plugins This is a non-exhaustive list of Erlang.mk plugins, sorted alphabetically. === elvis.mk An https://github.com/inaka/elvis.mk[Elvis plugin] for Erlang.mk. Elvis is an https://github.com/inaka/elvis[Erlang style reviewer]. === geas https://github.com/crownedgrouse/geas[Geas] gives aggregated information on a project and its dependencies, and is available as an Erlang.mk plugin. === hexer.mk An https://github.com/inaka/hexer.mk[Hex plugin] for Erlang.mk. Hex is a https://hex.pm/[package manager for the Elixir ecosystem]. === reload.mk A https://github.com/bullno1/reload.mk[live reload plugin] for Erlang.mk. ``` Add elixir.mk and lfe.mk to the plugins list
```asciidoc [[plugins_list]] == List of plugins This is a non-exhaustive list of Erlang.mk plugins, sorted alphabetically. === elixir.mk An https://github.com/botsunit/elixir.mk[Elixir plugin] for Erlang.mk. http://elixir-lang.org/[Elixir] is an alternative language for the BEAM. === elvis.mk An https://github.com/inaka/elvis.mk[Elvis plugin] for Erlang.mk. Elvis is an https://github.com/inaka/elvis[Erlang style reviewer]. === geas https://github.com/crownedgrouse/geas[Geas] gives aggregated information on a project and its dependencies, and is available as an Erlang.mk plugin. === hexer.mk An https://github.com/inaka/hexer.mk[Hex plugin] for Erlang.mk. Hex is a https://hex.pm/[package manager for the Elixir ecosystem]. === lfe.mk An https://github.com/ninenines/lfe.mk[LFE plugin] for Erlang.mk. LFE, or http://lfe.io/[Lisp Flavoured Erlang], is an alternative language for the BEAM. === reload.mk A https://github.com/bullno1/reload.mk[live reload plugin] for Erlang.mk. ```
375d3857-5ef7-4688-988f-6dbcf3df56cb
{ "language": "AsciiDoc" }
```asciidoc = Json-lib :version: 3.0.0.SNAPSHOT :linkattrs: :project-name: json-lib image:http://img.shields.io/travis/aalmiray/{project-name}/development.svg["Build Status", link="https://travis-ci.org/aalmiray/{project-name}"] image:http://img.shields.io/coveralls/aalmiray/{project-name}/development.svg["Coverage Status", link="https://coveralls.io/r/aalmiray/{project-name}"] image:http://img.shields.io/:semver-{version}-blue.svg["Semantic Versioning", link="http://semver.org"] image:http://img.shields.io/badge/license-ASF2-blue.svg["Apache License 2", link="http://www.apache.org/licenses/LICENSE-2.0.txt"] image:http://img.shields.io/badge/download-latest-bb00bb.svg[link="https://bintray.com/aalmiray/kordamp/{project-name}/_latestVersion"] --- JSON-lib is a java library for transforming beans, maps, collections, java arrays and XML to JSON and back again to beans and DynaBeans. Refer to the link:http://aalmiray.github.io/{project-name}/[project guide, window="_blank"] for further information on configuration and usage. ``` Fix project name in readme
```asciidoc = Json-lib :version: 3.0.0.SNAPSHOT :linkattrs: image:http://img.shields.io/travis/aalmiray/Json-lib/development.svg["Build Status", link="https://travis-ci.org/aalmiray/Json-lib"] image:http://img.shields.io/coveralls/aalmiray/Json-lib/development.svg["Coverage Status", link="https://coveralls.io/r/aalmiray/Json-lib"] image:http://img.shields.io/:semver-{version}-blue.svg["Semantic Versioning", link="http://semver.org"] image:http://img.shields.io/badge/license-ASF2-blue.svg["Apache License 2", link="http://www.apache.org/licenses/LICENSE-2.0.txt"] image:http://img.shields.io/badge/download-latest-bb00bb.svg[link="https://bintray.com/aalmiray/kordamp/json-lib/_latestVersion"] --- JSON-lib is a java library for transforming beans, maps, collections, java arrays and XML to JSON and back again to beans and DynaBeans. Refer to the link:http://aalmiray.github.io/json-lib/[project guide, window="_blank"] for further information on configuration and usage. ```
008d5979-1e29-4e66-9883-246e37e238bf
{ "language": "AsciiDoc" }
```asciidoc == ANDROID MAVEN PLUGIN A plugin for Android application development with http://maven.apache.org[Apache Maven 3.0.3+] and the http://tools.android.com[Android SDK]. === Links * http://code.google.com/p/maven-android-plugin[Project site] with wiki and more * http://code.google.com/p/maven-android-plugin/issues/list[Issue tracker] * http://maven-android-plugin-m2site.googlecode.com/svn/index.html[Maven generated plugin documentation site] * http://www.sonatype.com/books/mvnref-book/reference/android-dev.html[Maven: Complete Reference - Chapter - Android Application Development with Maven] * https://groups.google.com/forum/?fromgroups#!forum/maven-android-developers[Mailinglist] * http://code.google.com/p/maven-android-plugin/wiki/Changelog[Changelog] * http://jenkins.josefson.org/[Continuous Integration Server Builds] === Contributions We welcome your feature enhancements and bug fixes in pull requests! ``` Change link to CI server.
```asciidoc == ANDROID MAVEN PLUGIN A plugin for Android application development with http://maven.apache.org[Apache Maven 3.0.3+] and the http://tools.android.com[Android SDK]. === Links * http://code.google.com/p/maven-android-plugin[Project site] with wiki and more * http://code.google.com/p/maven-android-plugin/issues/list[Issue tracker] * http://maven-android-plugin-m2site.googlecode.com/svn/index.html[Maven generated plugin documentation site] * http://www.sonatype.com/books/mvnref-book/reference/android-dev.html[Maven: Complete Reference - Chapter - Android Application Development with Maven] * https://groups.google.com/forum/?fromgroups#!forum/maven-android-developers[Mailinglist] * http://code.google.com/p/maven-android-plugin/wiki/Changelog[Changelog] * image:https://travis-ci.org/jayway/maven-android-plugin.png["Build Status", link="https://travis-ci.org/jayway/maven-android-plugin"] === Contributions We welcome your feature enhancements and bug fixes in pull requests! ```
e6e696e0-1837-4bdd-a832-f16599f95399
{ "language": "AsciiDoc" }
```asciidoc = Install a Git Server == Start Services . All services can be started, in detached mode, by giving the command: + docker-compose up -d + And this shows the output as: + Creating git_serverdata... Creating gitserver_git_dbdata_1... Creating gitserver_git_db_1... Creating git... + . Configure the installation .. execute the following script + ./install-gogs.sh <DOCKER_HOST_IP> <git_server_PORT> + _example: ./install.sh 192.168.99.100 3000_ == Sign Up . Access to this URL: http://dockerhost:3000/user/sign_up . Create an account and enjoy! . Enjoy! :) ``` Fix usage command with install-gogs.sh
```asciidoc = Install a Git Server == Start Services . All services can be started, in detached mode, by giving the command: + docker-compose up -d + And this shows the output as: + Creating git_serverdata... Creating gitserver_git_dbdata_1... Creating gitserver_git_db_1... Creating git... + . Configure the installation .. execute the following script + ./install-gogs.sh <DOCKER_HOST_IP> <git_server_PORT> + _example: ./install-gogs.sh 192.168.99.100 3000_ == Sign Up . Access to this URL: http://dockerhost:3000/user/sign_up . Create an account and enjoy! . Enjoy! :) ```
f87b447e-c093-4315-9467-2ed86c97c528
{ "language": "AsciiDoc" }
```asciidoc = cowboy_router:compile(3) == Name cowboy_router:compile - Compile routes to the resources == Description [source,erlang] ---- compile(cowboy_router:routes()) -> cowboy_router:dispatch_rules() ---- Compile routes to the resources. Takes a human readable list of routes and transforms it into a form more efficient to process. == Arguments Routes:: Human readable list of routes. == Return value An opaque dispatch rules value is returned. This value must be given to Cowboy as a middleware environment value. == Changelog * *1.0*: Function introduced. == Examples .Compile routes and start a listener [source,erlang] ---- Dispatch = cowboy_router:compile([ {'_', [ {"/", toppage_h, []}, {"/[...], cowboy_static, {priv_dir, my_example_app, ""}} ]} ]), {ok, _} = cowboy:start_clear(example, [{port, 8080}], #{ env => #{dispatch => Dispatch} }). ---- == See also link:man:cowboy_router(3)[cowboy_router(3)] ``` Fix an example missing a " in the manual
```asciidoc = cowboy_router:compile(3) == Name cowboy_router:compile - Compile routes to the resources == Description [source,erlang] ---- compile(cowboy_router:routes()) -> cowboy_router:dispatch_rules() ---- Compile routes to the resources. Takes a human readable list of routes and transforms it into a form more efficient to process. == Arguments Routes:: Human readable list of routes. == Return value An opaque dispatch rules value is returned. This value must be given to Cowboy as a middleware environment value. == Changelog * *1.0*: Function introduced. == Examples .Compile routes and start a listener [source,erlang] ---- Dispatch = cowboy_router:compile([ {'_', [ {"/", toppage_h, []}, {"/[...]", cowboy_static, {priv_dir, my_example_app, ""}} ]} ]), {ok, _} = cowboy:start_clear(example, [{port, 8080}], #{ env => #{dispatch => Dispatch} }). ---- == See also link:man:cowboy_router(3)[cowboy_router(3)] ```
ecb4d145-2b78-4bec-ae57-d496cdd4a0eb
{ "language": "AsciiDoc" }
```asciidoc [[search]] = Search == The SearchContent Resource When a Store extending `Searchable` is exported, a `searchContent` endpoint will be available at the `/{store}/searchContent` URI. ==== [source, sh] ---- curl -H 'Accept: application/hal+json' http://localhost:8080/searchContent?queryString=foo ---- ==== === Supported HTTP Methods As the SearchContent resource is read-only it supports `GET` only. All other HTTP methods will cause a `405 Method Not Allowed`. ==== Supported media types - `application/hal+json` - `application/json`. ``` Add documentation explaining the format of the searchContent response payload
```asciidoc
[[search]]
= Search

== The SearchContent Resource

When a Store extending `Searchable` is exported, a `searchContent` endpoint will be available at the `/{store}/searchContent` URI.

====
[source, sh]
----
curl -H 'Accept: application/hal+json' http://localhost:8080/searchContent?queryString=foo
----
====

=== Supported HTTP Methods

As the SearchContent resource is read-only it supports `GET` only. All other HTTP methods will cause a `405 Method Not Allowed`.

==== Supported media types

- `application/hal+json`
- `application/json`.

=== Format of the response payload

This resource can return entities, or a custom search result type, depending on how the Searchable interface is specified and the type of Store it decorates.

When Searchable decorates an AssociativeStore this resource will lookup and return representations of the content's associated entities. These lookups can be made more efficient by specifying a `@FulltextEntityLookupQuery` query method. This is a custom `findAll` method that accepts a single `Collection` parameter annotated with the name `contentIds`, as follows:

```
public interface MyRepository extends CrudRepository<MyEntity, Long> {

    @FulltextEntityLookupQuery
    List<MyEntity> findAllByContentIdIn(@Param("contentIds") List<UUID> contentIds);
}

public interface MyStore extends AssociativeStore<MyEntity, UUID>, Searchable<UUID> {}
```

When Searchable is typed to your own search return type the resource will return a representation of this type instead. See `Search Return Types` in the respective documentation for your chosen Spring Content fulltext module; solr or elasticsearch, for more information on specifying a custom search return type.
```
3ba1d750-891b-44bb-8eb0-67c8eb8e8dc2
{ "language": "AsciiDoc" }
```asciidoc // Module included in the following assemblies: // // * web_console/customizing-the-web-console.adoc [id="creating-custom-notification-banners_{context}"] = Creating custom notification banners .Prerequisites * You must have administrator privileges. .Procedure . From *Administration* -> *Custom Resource Definitions*, click on *ConsoleNotification*. . Click *YAML* and edit the file: + ---- apiVersion: console.openshift.io/v1 kind: ConsoleNotification metadata: name: example spec: backgroundColor: '#0088ce' color: '#fff' link: href: 'https://www.example.com' text: Optional link text location: BannerTop <1> text: This is an example notification message with an optional link. ---- <1> Valid location settings are `BannerTop`, `BannerBottom`, and `BannerTopBottom`. . Click the *Save* button to apply your changes. ``` Fix navigation path and CRD format in creating-custom-notification-banners
```asciidoc
// Module included in the following assemblies:
//
// * web_console/customizing-the-web-console.adoc

[id="creating-custom-notification-banners_{context}"]
= Creating custom notification banners

.Prerequisites

* You must have administrator privileges.

.Procedure

. From *Administration* -> *Custom Resource Definitions*, click on *ConsoleNotification*.

. Select the *Instances* tab

. Click *Create Console Notification* and edit the file:
+
----
apiVersion: console.openshift.io/v1
kind: ConsoleNotification
metadata:
  name: example
spec:
  text: This is an example notification message with an optional link.
  location: BannerTop <1>
  link:
    href: 'https://www.example.com'
    text: Optional link text
  color: '#fff'
  backgroundColor: '#0088ce'
----
<1> Valid location settings are `BannerTop`, `BannerBottom`, and `BannerTopBottom`.

. Click the *Create* button to apply your changes.
```
6f3e079d-0bda-46cb-a0f7-50231a509665
{ "language": "AsciiDoc" }
```asciidoc [id="persistent-storage-using-fibre"] = Persistent storage using Fibre Channel include::modules/common-attributes.adoc[] :context: persistent-storage-fibre toc::[] {product-title} supports Fibre Channel, allowing you to provision your {product-title} cluster with persistent storage using Fibre channel volumes. Some familiarity with Kubernetes and Fibre Channel is assumed. The Kubernetes persistent volume framework allows administrators to provision a cluster with persistent storage and gives users a way to request those resources without having any knowledge of the underlying infrastructure. Persistent volumes are not bound to a single project or namespace; they can be shared across the {product-title} cluster. Persistent volume claims are specific to a project or namespace and can be requested by users. [IMPORTANT] ==== High availability of storage in the infrastructure is left to the underlying storage provider. ==== .Additional resources * link:https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/7/html/storage_administration_guide/ch-fibrechanel[Fibre Channel] include::modules/persistent-storage-fibre-provisioning.adoc[leveloffset=+1] include::modules/persistent-storage-fibre-disk-quotas.adoc[leveloffset=+2] include::modules/persistent-storage-fibre-volume-security.adoc[leveloffset=+2] ``` Update FC link to RHEL8 doc
```asciidoc [id="persistent-storage-using-fibre"] = Persistent storage using Fibre Channel include::modules/common-attributes.adoc[] :context: persistent-storage-fibre toc::[] {product-title} supports Fibre Channel, allowing you to provision your {product-title} cluster with persistent storage using Fibre channel volumes. Some familiarity with Kubernetes and Fibre Channel is assumed. The Kubernetes persistent volume framework allows administrators to provision a cluster with persistent storage and gives users a way to request those resources without having any knowledge of the underlying infrastructure. Persistent volumes are not bound to a single project or namespace; they can be shared across the {product-title} cluster. Persistent volume claims are specific to a project or namespace and can be requested by users. [IMPORTANT] ==== High availability of storage in the infrastructure is left to the underlying storage provider. ==== .Additional resources * link:https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/8/html/managing_storage_devices/using-fibre-channel-devices_managing-storage-devices[Using Fibre Channel devices] include::modules/persistent-storage-fibre-provisioning.adoc[leveloffset=+1] include::modules/persistent-storage-fibre-disk-quotas.adoc[leveloffset=+2] include::modules/persistent-storage-fibre-volume-security.adoc[leveloffset=+2] ```