Dataset Viewer

| column | dtype | values |
|---|---|---|
| problem_id | string | lengths 18–22 |
| source | string | 1 distinct value |
| task_type | string | 1 distinct value |
| in_source_id | string | lengths 13–58 |
| prompt | string | lengths 1.71k–18.9k |
| golden_diff | string | lengths 145–5.13k |
| verification_info | string | lengths 465–23.6k |
| num_tokens_prompt | int64 | 556–4.1k |
| num_tokens_diff | int64 | 47–1.02k |
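
For reference, the rows below can also be loaded programmatically. A minimal sketch with the `datasets` library follows; the dataset ID is taken from the `source` column above, while the split name is an assumption:

```python
from datasets import load_dataset

# Dataset ID from the `source` column; "train" is an assumed split name.
ds = load_dataset("rasdani/github-patches", split="train")
row = ds[0]
print(row["problem_id"], row["in_source_id"])
print(row["prompt"][:200])       # the task prompt shown per row below
print(row["golden_diff"][:200])  # the reference patch
```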
problem_id: gh_patches_debug_32737 | source: rasdani/github-patches | task_type: git_diff | in_source_id: dask__dask-586

prompt:

You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Additional improvement for da.image.imread
I think the following 2 improvements would make the function better.
#### 1. Allow a custom `imread` function.
Some prefer `opencv`, which reads color in BGR order, whereas `skimage` reads in RGB order. Adding a `dialect` option (like `dialect='skimage'` or `dialect='cv'`) or accepting a different file-read function may be better.
#### 2. Allow a custom preprocessing function.
In image processing, input images may have different sizes. In these cases, preprocessing is needed between image read and array creation. This preprocessing function must return arrays of a consistent size, on the user's responsibility.
</issue>
<code>
[start of dask/array/image.py]
1 from glob import glob
2 import os
3
4 try:
5 from skimage.io import imread as sk_imread
6 except ImportError:
7 pass
8
9 from .core import Array
10 from ..base import tokenize
11
12 def add_leading_dimension(x):
13 return x[None, ...]
14
15
16 def imread(filename):
17 """ Read a stack of images into a dask array
18
19 Parameters
20 ----------
21
22 filename: string
23 A globstring like 'myfile.*.png'
24
25 Example
26 -------
27
28 >>> from dask.array.image import imread
29 >>> im = imread('2015-*-*.png') # doctest: +SKIP
30 >>> im.shape # doctest: +SKIP
31 (365, 1000, 1000, 3)
32
33 Returns
34 -------
35
36 Dask array of all images stacked along the first dimension. All images
37 will be treated as individual chunks
38 """
39 filenames = sorted(glob(filename))
40 if not filenames:
41 raise ValueError("No files found under name %s" % filename)
42
43 name = 'imread-%s' % tokenize(filenames, map(os.path.getmtime, filenames))
44
45 sample = sk_imread(filenames[0])
46
47 dsk = dict(((name, i) + (0,) * len(sample.shape),
48 (add_leading_dimension, (sk_imread, filename)))
49 for i, filename in enumerate(filenames))
50
51 chunks = ((1,) * len(filenames),) + tuple((d,) for d in sample.shape)
52
53 return Array(dsk, name, chunks, sample.dtype)
54
[end of dask/array/image.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch>

golden_diff:

diff --git a/dask/array/image.py b/dask/array/image.py
--- a/dask/array/image.py
+++ b/dask/array/image.py
@@ -13,7 +13,7 @@
return x[None, ...]
-def imread(filename):
+def imread(filename, imread=None, preprocess=None):
""" Read a stack of images into a dask array
Parameters
@@ -21,6 +21,13 @@
filename: string
A globstring like 'myfile.*.png'
+ imread: function (optional)
+ Optionally provide custom imread function.
+ Function should expect a filename and produce a numpy array.
+ Defaults to ``skimage.io.imread``.
+ preprocess: function (optional)
+ Optionally provide custom function to preprocess the image.
+ Function should expect a numpy array for a single image.
Example
-------
@@ -36,17 +43,25 @@
Dask array of all images stacked along the first dimension. All images
will be treated as individual chunks
"""
+ imread = imread or sk_imread
filenames = sorted(glob(filename))
if not filenames:
raise ValueError("No files found under name %s" % filename)
name = 'imread-%s' % tokenize(filenames, map(os.path.getmtime, filenames))
- sample = sk_imread(filenames[0])
-
- dsk = dict(((name, i) + (0,) * len(sample.shape),
- (add_leading_dimension, (sk_imread, filename)))
- for i, filename in enumerate(filenames))
+ sample = imread(filenames[0])
+ if preprocess:
+ sample = preprocess(sample)
+
+ keys = [(name, i) + (0,) * len(sample.shape) for i in range(len(filenames))]
+ if preprocess:
+ values = [(add_leading_dimension, (preprocess, (imread, filename)))
+ for filename in filenames]
+ else:
+ values = [(add_leading_dimension, (imread, filename))
+ for filename in filenames]
+ dsk = dict(zip(keys, values))
chunks = ((1,) * len(filenames),) + tuple((d,) for d in sample.shape)
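
For orientation only, a sketch of how the patched signature above might be called; the glob pattern and the preprocessing helper are illustrative, not taken from the dataset:

```python
from dask.array.image import imread

def crop_top_left(img):
    # Illustrative preprocess: force every frame to a common 100x100 window.
    return img[:100, :100]

# Uses the new keyword argument added by the diff above.
stack = imread("frames/*.png", preprocess=crop_top_left)
print(stack.shape)  # (n_files, 100, 100) or (n_files, 100, 100, channels)
```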
| {"golden_diff": "diff --git a/dask/array/image.py b/dask/array/image.py\n--- a/dask/array/image.py\n+++ b/dask/array/image.py\n@@ -13,7 +13,7 @@\n return x[None, ...]\n \n \n-def imread(filename):\n+def imread(filename, imread=None, preprocess=None):\n \"\"\" Read a stack of images into a dask array\n \n Parameters\n@@ -21,6 +21,13 @@\n \n filename: string\n A globstring like 'myfile.*.png'\n+ imread: function (optional)\n+ Optionally provide custom imread function.\n+ Function should expect a filename and produce a numpy array.\n+ Defaults to ``skimage.io.imread``.\n+ preprocess: function (optional)\n+ Optionally provide custom function to preprocess the image.\n+ Function should expect a numpy array for a single image.\n \n Example\n -------\n@@ -36,17 +43,25 @@\n Dask array of all images stacked along the first dimension. All images\n will be treated as individual chunks\n \"\"\"\n+ imread = imread or sk_imread\n filenames = sorted(glob(filename))\n if not filenames:\n raise ValueError(\"No files found under name %s\" % filename)\n \n name = 'imread-%s' % tokenize(filenames, map(os.path.getmtime, filenames))\n \n- sample = sk_imread(filenames[0])\n-\n- dsk = dict(((name, i) + (0,) * len(sample.shape),\n- (add_leading_dimension, (sk_imread, filename)))\n- for i, filename in enumerate(filenames))\n+ sample = imread(filenames[0])\n+ if preprocess:\n+ sample = preprocess(sample)\n+\n+ keys = [(name, i) + (0,) * len(sample.shape) for i in range(len(filenames))]\n+ if preprocess:\n+ values = [(add_leading_dimension, (preprocess, (imread, filename)))\n+ for filename in filenames]\n+ else:\n+ values = [(add_leading_dimension, (imread, filename))\n+ for filename in filenames]\n+ dsk = dict(zip(keys, values))\n \n chunks = ((1,) * len(filenames),) + tuple((d,) for d in sample.shape)\n", "issue": "Additional improvement for da.image.imread\nI think following 2 improvements make the function better.\n#### 1. Allow custom `imread` function.\n\nSome prefer `opencv` which reads color in BGR order, otherwise `skimage` reads in RGB order. Adding `dialect` option (like `dialect='skimage'` or `dialect='cv'`) or accept different file read function may better.\n#### 2. Allow custom preprocessing function.\n\nIn image processing, input images may have different sizes. In these case, preprocessing is needed between image read and array creation. This preprocessing function must return the same size of array on user's responsibility.\n\n", "before_files": [{"content": "from glob import glob\nimport os\n\ntry:\n from skimage.io import imread as sk_imread\nexcept ImportError:\n pass\n\nfrom .core import Array\nfrom ..base import tokenize\n\ndef add_leading_dimension(x):\n return x[None, ...]\n\n\ndef imread(filename):\n \"\"\" Read a stack of images into a dask array\n\n Parameters\n ----------\n\n filename: string\n A globstring like 'myfile.*.png'\n\n Example\n -------\n\n >>> from dask.array.image import imread\n >>> im = imread('2015-*-*.png') # doctest: +SKIP\n >>> im.shape # doctest: +SKIP\n (365, 1000, 1000, 3)\n\n Returns\n -------\n\n Dask array of all images stacked along the first dimension. 
All images\n will be treated as individual chunks\n \"\"\"\n filenames = sorted(glob(filename))\n if not filenames:\n raise ValueError(\"No files found under name %s\" % filename)\n\n name = 'imread-%s' % tokenize(filenames, map(os.path.getmtime, filenames))\n\n sample = sk_imread(filenames[0])\n\n dsk = dict(((name, i) + (0,) * len(sample.shape),\n (add_leading_dimension, (sk_imread, filename)))\n for i, filename in enumerate(filenames))\n\n chunks = ((1,) * len(filenames),) + tuple((d,) for d in sample.shape)\n\n return Array(dsk, name, chunks, sample.dtype)\n", "path": "dask/array/image.py"}]} | 1,119 | 497 |
problem_id: gh_patches_debug_3876 | source: rasdani/github-patches | task_type: git_diff | in_source_id: xorbitsai__inference-299

prompt:

You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
FEAT: Disable Gradio Telemetry
Pull requests are disabled, but see here:
https://github.com/arch-btw/inference/pull/1
</issue>
<code>
[start of examples/gradio_chatinterface.py]
1 from typing import Dict, List
2
3 import gradio as gr
4
5 from xinference.client import Client
6
7 if __name__ == "__main__":
8 import argparse
9 import textwrap
10
11 parser = argparse.ArgumentParser(
12 formatter_class=argparse.RawDescriptionHelpFormatter,
13 epilog=textwrap.dedent(
14 """\
15 instructions to run:
16 1. Install Xinference and Llama-cpp-python
17 2. Run 'xinference --host "localhost" --port 9997' in terminal
18 3. Run this python file in new terminal window
19
20 e.g. (feel free to copy)
21 python gradio_chatinterface.py \\
22 --endpoint http://localhost:9997 \\
23 --model_name vicuna-v1.3 \\
24 --model_size_in_billions 7 \\
25 --model_format ggmlv3 \\
26 --quantization q2_K
27
28 If you decide to change the port number in step 2,
29 please also change the endpoint in the arguments
30 """
31 ),
32 )
33
34 parser.add_argument(
35 "--endpoint", type=str, required=True, help="Xinference endpoint, required"
36 )
37 parser.add_argument(
38 "--model_name", type=str, required=True, help="Name of the model, required"
39 )
40 parser.add_argument(
41 "--model_size_in_billions",
42 type=int,
43 required=False,
44 help="Size of the model in billions",
45 )
46 parser.add_argument(
47 "--model_format",
48 type=str,
49 required=False,
50 help="Format of the model",
51 )
52 parser.add_argument(
53 "--quantization", type=str, required=False, help="Quantization of the model"
54 )
55
56 args = parser.parse_args()
57
58 endpoint = args.endpoint
59 model_name = args.model_name
60 model_size_in_billions = args.model_size_in_billions
61 model_format = args.model_format
62 quantization = args.quantization
63
64 print(f"Xinference endpoint: {endpoint}")
65 print(f"Model Name: {model_name}")
66 print(f"Model Size (in billions): {model_size_in_billions}")
67 print(f"Model Format: {model_format}")
68 print(f"Quantization: {quantization}")
69
70 client = Client(endpoint)
71 model_uid = client.launch_model(
72 model_name,
73 model_size_in_billions=model_size_in_billions,
74 model_format=model_format,
75 quantization=quantization,
76 n_ctx=2048,
77 )
78 model = client.get_model(model_uid)
79
80 def flatten(matrix: List[List[str]]) -> List[str]:
81 flat_list = []
82 for row in matrix:
83 flat_list += row
84 return flat_list
85
86 def to_chat(lst: List[str]) -> List[Dict[str, str]]:
87 res = []
88 for i in range(len(lst)):
89 role = "assistant" if i % 2 == 1 else "user"
90 res.append(
91 {
92 "role": role,
93 "content": lst[i],
94 }
95 )
96 return res
97
98 def generate_wrapper(message: str, history: List[List[str]]) -> str:
99 output = model.chat(
100 prompt=message,
101 chat_history=to_chat(flatten(history)),
102 generate_config={"max_tokens": 512, "stream": False},
103 )
104 return output["choices"][0]["message"]["content"]
105
106 demo = gr.ChatInterface(
107 fn=generate_wrapper,
108 examples=[
109 "Show me a two sentence horror story with a plot twist",
110 "Generate a Haiku poem using trignometry as the central theme",
111 "Write three sentences of scholarly description regarding a supernatural beast",
112 "Prove there does not exist a largest integer",
113 ],
114 title="Xinference Chat Bot",
115 )
116 demo.launch()
117
[end of examples/gradio_chatinterface.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch>

golden_diff:

diff --git a/examples/gradio_chatinterface.py b/examples/gradio_chatinterface.py
--- a/examples/gradio_chatinterface.py
+++ b/examples/gradio_chatinterface.py
@@ -105,6 +105,7 @@
demo = gr.ChatInterface(
fn=generate_wrapper,
+ analytics_enabled=False,
examples=[
"Show me a two sentence horror story with a plot twist",
"Generate a Haiku poem using trignometry as the central theme",
| {"golden_diff": "diff --git a/examples/gradio_chatinterface.py b/examples/gradio_chatinterface.py\n--- a/examples/gradio_chatinterface.py\n+++ b/examples/gradio_chatinterface.py\n@@ -105,6 +105,7 @@\n \n demo = gr.ChatInterface(\n fn=generate_wrapper,\n+ analytics_enabled=False,\n examples=[\n \"Show me a two sentence horror story with a plot twist\",\n \"Generate a Haiku poem using trignometry as the central theme\",\n", "issue": "FEAT: Disable Gradio Telemetry\nPull requests are disabled but see here:\r\n\r\nhttps://github.com/arch-btw/inference/pull/1\n", "before_files": [{"content": "from typing import Dict, List\n\nimport gradio as gr\n\nfrom xinference.client import Client\n\nif __name__ == \"__main__\":\n import argparse\n import textwrap\n\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n epilog=textwrap.dedent(\n \"\"\"\\\n instructions to run:\n 1. Install Xinference and Llama-cpp-python\n 2. Run 'xinference --host \"localhost\" --port 9997' in terminal\n 3. Run this python file in new terminal window\n\n e.g. (feel free to copy)\n python gradio_chatinterface.py \\\\\n --endpoint http://localhost:9997 \\\\\n --model_name vicuna-v1.3 \\\\\n --model_size_in_billions 7 \\\\\n --model_format ggmlv3 \\\\\n --quantization q2_K\n\n If you decide to change the port number in step 2,\n please also change the endpoint in the arguments\n \"\"\"\n ),\n )\n\n parser.add_argument(\n \"--endpoint\", type=str, required=True, help=\"Xinference endpoint, required\"\n )\n parser.add_argument(\n \"--model_name\", type=str, required=True, help=\"Name of the model, required\"\n )\n parser.add_argument(\n \"--model_size_in_billions\",\n type=int,\n required=False,\n help=\"Size of the model in billions\",\n )\n parser.add_argument(\n \"--model_format\",\n type=str,\n required=False,\n help=\"Format of the model\",\n )\n parser.add_argument(\n \"--quantization\", type=str, required=False, help=\"Quantization of the model\"\n )\n\n args = parser.parse_args()\n\n endpoint = args.endpoint\n model_name = args.model_name\n model_size_in_billions = args.model_size_in_billions\n model_format = args.model_format\n quantization = args.quantization\n\n print(f\"Xinference endpoint: {endpoint}\")\n print(f\"Model Name: {model_name}\")\n print(f\"Model Size (in billions): {model_size_in_billions}\")\n print(f\"Model Format: {model_format}\")\n print(f\"Quantization: {quantization}\")\n\n client = Client(endpoint)\n model_uid = client.launch_model(\n model_name,\n model_size_in_billions=model_size_in_billions,\n model_format=model_format,\n quantization=quantization,\n n_ctx=2048,\n )\n model = client.get_model(model_uid)\n\n def flatten(matrix: List[List[str]]) -> List[str]:\n flat_list = []\n for row in matrix:\n flat_list += row\n return flat_list\n\n def to_chat(lst: List[str]) -> List[Dict[str, str]]:\n res = []\n for i in range(len(lst)):\n role = \"assistant\" if i % 2 == 1 else \"user\"\n res.append(\n {\n \"role\": role,\n \"content\": lst[i],\n }\n )\n return res\n\n def generate_wrapper(message: str, history: List[List[str]]) -> str:\n output = model.chat(\n prompt=message,\n chat_history=to_chat(flatten(history)),\n generate_config={\"max_tokens\": 512, \"stream\": False},\n )\n return output[\"choices\"][0][\"message\"][\"content\"]\n\n demo = gr.ChatInterface(\n fn=generate_wrapper,\n examples=[\n \"Show me a two sentence horror story with a plot twist\",\n \"Generate a Haiku poem using trignometry as the central theme\",\n \"Write three sentences of 
scholarly description regarding a supernatural beast\",\n \"Prove there does not exist a largest integer\",\n ],\n title=\"Xinference Chat Bot\",\n )\n demo.launch()\n", "path": "examples/gradio_chatinterface.py"}]} | 1,628 | 103 |
problem_id: gh_patches_debug_16504 | source: rasdani/github-patches | task_type: git_diff | in_source_id: mampfes__hacs_waste_collection_schedule-1693

prompt:

You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[Bug]: portenf_sa_gov_au reporting incorrect dates
### I Have A Problem With:
A specific source
### What's Your Problem
The portenf_sa_gov_au sensor has been reporting incorrectly since it updated itself on 24 December 2023 (I can see this from HA logs). It appears that when there is 1 week or less left in the month, "main-month" switches to the coming month and "other-month" becomes the current month.
Because of this, the integration reports the current collection as falling in the next month, and the next collections as in the past (and hides them).
The fix in #1110 by @5ila5 partly addresses the problem, but it was not foreseeable to him that EOM (end of month) would be treated this way. @5ila5 also noted that this might be an issue in that closed issue.
### Source (if relevant)
portenf_sa_gov_au
### Logs
```Shell
Output of test_sources.py:
Testing source portenf_sa_gov_au ...
found 8 entries for Broadview, Regency Road, 565
2024-01-26 : general-waste bin [mdi:trash-can]
2024-01-26 : recycling bin [mdi:recycle]
2023-12-02 : general-waste bin [mdi:trash-can]
2023-12-02 : organics bin [mdi:leaf]
2023-12-09 : general-waste bin [mdi:trash-can]
2023-12-09 : recycling bin [mdi:recycle]
2023-12-16 : general-waste bin [mdi:trash-can]
2023-12-16 : organics bin [mdi:leaf]
found 8 entries for 48 Floriedale Rd
2024-01-26 : general-waste bin [mdi:trash-can]
2024-01-26 : recycling bin [mdi:recycle]
2023-12-02 : general-waste bin [mdi:trash-can]
2023-12-02 : organics bin [mdi:leaf]
2023-12-09 : general-waste bin [mdi:trash-can]
2023-12-09 : recycling bin [mdi:recycle]
2023-12-16 : general-waste bin [mdi:trash-can]
2023-12-16 : organics bin [mdi:leaf]
found 8 entries for 24 Margaret Terrace
2024-01-28 : general-waste bin [mdi:trash-can]
2024-01-28 : organics bin [mdi:leaf]
2023-12-04 : general-waste bin [mdi:trash-can]
2023-12-04 : recycling bin [mdi:recycle]
2023-12-11 : general-waste bin [mdi:trash-can]
2023-12-11 : organics bin [mdi:leaf]
2023-12-18 : general-waste bin [mdi:trash-can]
2023-12-18 : recycling bin [mdi:recycle]
found 8 entries for Addison Road 91 with unit
2024-01-28 : general-waste bin [mdi:trash-can]
2024-01-28 : organics bin [mdi:leaf]
2023-12-04 : general-waste bin [mdi:trash-can]
2023-12-04 : recycling bin [mdi:recycle]
2023-12-11 : general-waste bin [mdi:trash-can]
2023-12-11 : organics bin [mdi:leaf]
2023-12-18 : general-waste bin [mdi:trash-can]
2023-12-18 : recycling bin [mdi:recycle]
```
### Relevant Configuration
_No response_
### Checklist Source Error
- [X] Use the example parameters for your source (often available in the documentation) (don't forget to restart Home Assistant after changing the configuration)
- [X] Checked that the website of your service provider is still working
- [X] Tested my attributes on the service provider website (if possible)
- [X] I have tested with the latest version of the integration (master) (for HACS in the 3 dot menu of the integration click on "Redownload" and choose master as version)
### Checklist Sensor Error
- [X] Checked in the Home Assistant Calendar tab if the event names match the types names (if types argument is used)
### Required
- [X] I have searched past (closed AND opened) issues to see if this bug has already been reported, and it hasn't been.
- [X] I understand that people give their precious time for free, and thus I've done my very best to make this problem as easy as possible to investigate.
</issue>
<code>
[start of custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py]
1 import logging
2 import re
3 from datetime import datetime
4
5 import requests
6 import urllib3
7 from bs4 import BeautifulSoup
8 from waste_collection_schedule import Collection # type: ignore[attr-defined]
9
10 # With verify=True the POST fails due to a SSLCertVerificationError.
11 # Using verify=False works, but is not ideal. The following links may provide a better way of dealing with this:
12 # https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html#ssl-warnings
13 # https://urllib3.readthedocs.io/en/1.26.x/user-guide.html#ssl
14 # These two lines areused to suppress the InsecureRequestWarning when using verify=False
15 urllib3.disable_warnings()
16
17 TITLE = "Port Adelaide Enfield, South Australia"
18 DESCRIPTION = "Source for City of Port Adelaide Enfield, South Australia."
19 URL = "https://ecouncil.portenf.sa.gov.au/"
20 TEST_CASES = {
21 "Broadview, Regency Road, 565 ": {
22 "suburb": "Broadview",
23 "street": "Regency Road",
24 "house_number": 565,
25 "unit_number": "",
26 },
27 "48 Floriedale Rd ": {
28 "suburb": "Greenacres",
29 "street": "Floriedale Rd",
30 "house_number": "48",
31 },
32 "24 Margaret Terrace": {
33 "suburb": "Rosewater",
34 "street": "Margaret Terrace",
35 "house_number": "24",
36 },
37 "Addison Road 91 with unit": {
38 "suburb": "Rosewater",
39 "street": "Addison Road",
40 "house_number": 91,
41 "unit_number": 2,
42 },
43 }
44
45 ICON_MAP = {
46 "general-waste bin": "mdi:trash-can",
47 "organics bin": "mdi:leaf",
48 "recycling bin": "mdi:recycle",
49 }
50
51 LOGGER = logging.getLogger(__name__)
52
53 API_URL = "https://ecouncil.portenf.sa.gov.au/public/propertywastedates/public.aspx"
54
55
56 class Source:
57 def __init__(
58 self,
59 suburb: str,
60 street: str,
61 house_number: str | int,
62 unit_number: str | int = "",
63 ):
64 self._suburb: str = suburb
65 self._street: str = street
66 self._house_number: str = str(house_number)
67 self._unit_number: str = str(unit_number)
68
69 def __set_args(
70 self, soup: BeautifulSoup, event_taget=None, additional: dict = {}
71 ) -> dict:
72 args = {
73 "ctl00$MainContent$txtSuburb": self._suburb,
74 "ctl00$MainContent$txtStreetName": self._street,
75 "ctl00$MainContent$txtHouseNumber": self._house_number,
76 "ctl00$MainContent$txtUnitNumber": self._unit_number,
77 }
78 if event_taget is not None:
79 args["__EVENTTARGET"] = event_taget
80
81 for hidden_val in soup.find_all("input", {"type": "hidden"}):
82 args[hidden_val["name"]] = hidden_val["value"]
83
84 for key, value in additional.items():
85 args[key] = value
86 return args
87
88 def fetch(self):
89 session = requests.Session()
90
91 # get First page
92 r = session.get(API_URL, verify=False)
93 r.raise_for_status()
94
95 # extractt arguments
96 args = self.__set_args(
97 BeautifulSoup(r.text, "html.parser"),
98 event_taget="ctl00$MainContent$btnSearch",
99 )
100
101 r = session.post(API_URL, data=args)
102 r.raise_for_status()
103
104 # get page to select an address
105 soup = BeautifulSoup(r.text, "html.parser")
106
107 selectable = soup.find_all("a", {"class": "anchor-button small"}, text="Select")
108
109 if len(selectable) == 0:
110 raise ValueError("No address found")
111 selected = selectable[0]
112
113 # If multiple addresses are found, try to find the one that matches the input and warn if there are multiple or none matches
114 if len(selectable) > 1:
115 found = [
116 " ".join(
117 [y.text for y in x.parent.parent.find_all("td")[1].find_all("span")]
118 )
119 for x in selectable
120 ]
121 using_index = 0
122
123 match = False
124
125 for index, entry in enumerate(found):
126 entry = entry.lower().strip().replace(" ", "")
127 if (
128 self._house_number.lower().strip().replace(" ", "") in entry
129 and self._street.lower().strip().replace(" ", "") in entry
130 and self._suburb.lower().strip().replace(" ", "") in entry
131 and self._unit_number.lower().strip().replace(" ", "") in entry
132 ):
133 if match:
134 LOGGER.warning(
135 f"Multiple addresses found, using first one \nfound:{', '.join(found[:10])}{'...' if len(found) >= 10 else ''} \nusing:{found[using_index]}"
136 )
137 break
138 using_index = index
139 match = True
140 if not match:
141 LOGGER.warning(
142 f"no perfect address match found, using:{found[using_index]}"
143 )
144
145 # request first address
146 args = self.__set_args(
147 soup,
148 event_taget="ctl00$MainContent$gvPropertyResults$ctl02$btnSelect",
149 additional={selected["href"].split("'")[1]: ""},
150 )
151 r = session.post(API_URL, data=args)
152 r.raise_for_status()
153
154 soup = BeautifulSoup(r.text, "html.parser")
155 cal_header = soup.find("th", {"class": "header-month"}).find("span").text
156
157 from_month = cal_header.split("-")[0].strip()
158 to_month = cal_header.split("-")[1].strip().split(" ")[0]
159 to_year = from_year = cal_header.split("-")[1].strip().split(" ")[1]
160 # if main month contains a year, set it (maybe happens in december???)
161 if len(from_month.split(" ")) > 1:
162 from_year = from_month.split(" ")[1]
163 from_month = from_month.split(" ")[0]
164
165 today_div = soup.find("table", id="cal").find("td", class_="today")
166 print(today_div)
167
168 # if other-month is to_month
169 if (
170 "other-month" in today_div.attrs
171 and datetime.now().strftime("%B") != to_month
172 ):
173 main_month, other_month = from_month, to_month
174 main_year, other_year = from_year, to_year
175 else: # if other-month is from_month
176 main_month, other_month = to_month, from_month
177 main_year, other_year = to_year, from_year
178
179 entries = []
180
181 calendar = soup.find("table", {"class": "collection-day-calendar"})
182 # Iterate over all days with pickups
183 for pickup in calendar.find_all(
184 "div", {"class": re.compile(r"pickup|next-pickup")}
185 ):
186 parent_td = pickup.parent
187 month = (
188 main_month if "main-month" in parent_td.attrs["class"] else other_month
189 )
190 year = main_year if "main-month" in parent_td.attrs["class"] else other_year
191 day = parent_td.find("div", {"class": "daynumber"}).text
192
193 # Iterate over all pickup container types for this day
194 for container in pickup.find_all("div", {"class": "bin-container"}):
195 container_type = " ".join(container.find("div").attrs["class"])
196 container_icon = ICON_MAP.get(container_type)
197
198 date = datetime.strptime(f"{year}-{month}-{day}", "%Y-%B-%d").date()
199 entries.append(
200 Collection(date=date, t=container_type, icon=container_icon)
201 )
202
203 return entries
204
[end of custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch>

golden_diff:

diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py
--- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py
+++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py
@@ -163,12 +163,14 @@
from_month = from_month.split(" ")[0]
today_div = soup.find("table", id="cal").find("td", class_="today")
- print(today_div)
# if other-month is to_month
if (
- "other-month" in today_div.attrs
- and datetime.now().strftime("%B") != to_month
+ "other-month" in today_div.attrs["class"]
+ and datetime.now().strftime("%B") == to_month
+ ) or (
+ "main-month" in today_div.attrs["class"]
+ and datetime.now().strftime("%B") == from_month
):
main_month, other_month = from_month, to_month
main_year, other_year = from_year, to_year
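
The key change above is `today_div.attrs` versus `today_div.attrs["class"]`; a small sketch of why that matters (the HTML snippet is illustrative):

```python
from bs4 import BeautifulSoup

td = BeautifulSoup('<td class="today other-month"></td>', "html.parser").td
print("other-month" in td.attrs)           # False: attrs keys are just "class"
print("other-month" in td.attrs["class"])  # True: membership in the class list
```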
| {"golden_diff": "diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py\n--- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py\n+++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py\n@@ -163,12 +163,14 @@\n from_month = from_month.split(\" \")[0]\n \n today_div = soup.find(\"table\", id=\"cal\").find(\"td\", class_=\"today\")\n- print(today_div)\n \n # if other-month is to_month\n if (\n- \"other-month\" in today_div.attrs\n- and datetime.now().strftime(\"%B\") != to_month\n+ \"other-month\" in today_div.attrs[\"class\"]\n+ and datetime.now().strftime(\"%B\") == to_month\n+ ) or (\n+ \"main-month\" in today_div.attrs[\"class\"]\n+ and datetime.now().strftime(\"%B\") == from_month\n ):\n main_month, other_month = from_month, to_month\n main_year, other_year = from_year, to_year\n", "issue": "[Bug]: portenf_sa_gov_au reporting incorrect dates\n### I Have A Problem With:\r\n\r\nA specific source\r\n\r\n### What's Your Problem\r\n\r\nThe portenf_sa_gov_au sensor has been reporting incorrectly since it updated itself on 24 December 2023 (I can see this from HA logs). It appears that when there is 1 week or less left in the month \"main-month\" switches to the coming month and \"other-month\" becomes the current month.\r\n\r\nBecause of this, the integration reports the current collection next month and the next collections as in the past (and hides them).\r\n\r\nThe fix in #1110 by @5ila5 partly addresses the problem but it was not foreseeable to him that EOM would be treated this way. @5ila5 also noted that this might be an issue in that closed issue.\r\n\r\n### Source (if relevant)\r\n\r\nportenf_sa_gov_au\r\n\r\n### Logs\r\n\r\n```Shell\r\nOutput of test_sources.py:\r\n\r\nTesting source portenf_sa_gov_au ...\r\n found 8 entries for Broadview, Regency Road, 565\r\n 2024-01-26 : general-waste bin [mdi:trash-can]\r\n 2024-01-26 : recycling bin [mdi:recycle]\r\n 2023-12-02 : general-waste bin [mdi:trash-can]\r\n 2023-12-02 : organics bin [mdi:leaf]\r\n 2023-12-09 : general-waste bin [mdi:trash-can]\r\n 2023-12-09 : recycling bin [mdi:recycle]\r\n 2023-12-16 : general-waste bin [mdi:trash-can]\r\n 2023-12-16 : organics bin [mdi:leaf]\r\n found 8 entries for 48 Floriedale Rd\r\n 2024-01-26 : general-waste bin [mdi:trash-can]\r\n 2024-01-26 : recycling bin [mdi:recycle]\r\n 2023-12-02 : general-waste bin [mdi:trash-can]\r\n 2023-12-02 : organics bin [mdi:leaf]\r\n 2023-12-09 : general-waste bin [mdi:trash-can]\r\n 2023-12-09 : recycling bin [mdi:recycle]\r\n 2023-12-16 : general-waste bin [mdi:trash-can]\r\n 2023-12-16 : organics bin [mdi:leaf]\r\n found 8 entries for 24 Margaret Terrace\r\n 2024-01-28 : general-waste bin [mdi:trash-can]\r\n 2024-01-28 : organics bin [mdi:leaf]\r\n 2023-12-04 : general-waste bin [mdi:trash-can]\r\n 2023-12-04 : recycling bin [mdi:recycle]\r\n 2023-12-11 : general-waste bin [mdi:trash-can]\r\n 2023-12-11 : organics bin [mdi:leaf]\r\n 2023-12-18 : general-waste bin [mdi:trash-can]\r\n 2023-12-18 : recycling bin [mdi:recycle]\r\n found 8 entries for Addison Road 91 with unit\r\n 2024-01-28 : general-waste bin [mdi:trash-can]\r\n 2024-01-28 : organics bin [mdi:leaf]\r\n 2023-12-04 : general-waste bin [mdi:trash-can]\r\n 2023-12-04 : recycling bin [mdi:recycle]\r\n 2023-12-11 : general-waste bin [mdi:trash-can]\r\n 
2023-12-11 : organics bin [mdi:leaf]\r\n 2023-12-18 : general-waste bin [mdi:trash-can]\r\n 2023-12-18 : recycling bin [mdi:recycle]\r\n```\r\n\r\n\r\n### Relevant Configuration\r\n\r\n_No response_\r\n\r\n### Checklist Source Error\r\n\r\n- [X] Use the example parameters for your source (often available in the documentation) (don't forget to restart Home Assistant after changing the configuration)\r\n- [X] Checked that the website of your service provider is still working\r\n- [X] Tested my attributes on the service provider website (if possible)\r\n- [X] I have tested with the latest version of the integration (master) (for HACS in the 3 dot menu of the integration click on \"Redownload\" and choose master as version)\r\n\r\n### Checklist Sensor Error\r\n\r\n- [X] Checked in the Home Assistant Calendar tab if the event names match the types names (if types argument is used)\r\n\r\n### Required\r\n\r\n- [X] I have searched past (closed AND opened) issues to see if this bug has already been reported, and it hasn't been.\r\n- [X] I understand that people give their precious time for free, and thus I've done my very best to make this problem as easy as possible to investigate.\n", "before_files": [{"content": "import logging\nimport re\nfrom datetime import datetime\n\nimport requests\nimport urllib3\nfrom bs4 import BeautifulSoup\nfrom waste_collection_schedule import Collection # type: ignore[attr-defined]\n\n# With verify=True the POST fails due to a SSLCertVerificationError.\n# Using verify=False works, but is not ideal. The following links may provide a better way of dealing with this:\n# https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html#ssl-warnings\n# https://urllib3.readthedocs.io/en/1.26.x/user-guide.html#ssl\n# These two lines areused to suppress the InsecureRequestWarning when using verify=False\nurllib3.disable_warnings()\n\nTITLE = \"Port Adelaide Enfield, South Australia\"\nDESCRIPTION = \"Source for City of Port Adelaide Enfield, South Australia.\"\nURL = \"https://ecouncil.portenf.sa.gov.au/\"\nTEST_CASES = {\n \"Broadview, Regency Road, 565 \": {\n \"suburb\": \"Broadview\",\n \"street\": \"Regency Road\",\n \"house_number\": 565,\n \"unit_number\": \"\",\n },\n \"48 Floriedale Rd \": {\n \"suburb\": \"Greenacres\",\n \"street\": \"Floriedale Rd\",\n \"house_number\": \"48\",\n },\n \"24 Margaret Terrace\": {\n \"suburb\": \"Rosewater\",\n \"street\": \"Margaret Terrace\",\n \"house_number\": \"24\",\n },\n \"Addison Road 91 with unit\": {\n \"suburb\": \"Rosewater\",\n \"street\": \"Addison Road\",\n \"house_number\": 91,\n \"unit_number\": 2,\n },\n}\n\nICON_MAP = {\n \"general-waste bin\": \"mdi:trash-can\",\n \"organics bin\": \"mdi:leaf\",\n \"recycling bin\": \"mdi:recycle\",\n}\n\nLOGGER = logging.getLogger(__name__)\n\nAPI_URL = \"https://ecouncil.portenf.sa.gov.au/public/propertywastedates/public.aspx\"\n\n\nclass Source:\n def __init__(\n self,\n suburb: str,\n street: str,\n house_number: str | int,\n unit_number: str | int = \"\",\n ):\n self._suburb: str = suburb\n self._street: str = street\n self._house_number: str = str(house_number)\n self._unit_number: str = str(unit_number)\n\n def __set_args(\n self, soup: BeautifulSoup, event_taget=None, additional: dict = {}\n ) -> dict:\n args = {\n \"ctl00$MainContent$txtSuburb\": self._suburb,\n \"ctl00$MainContent$txtStreetName\": self._street,\n \"ctl00$MainContent$txtHouseNumber\": self._house_number,\n \"ctl00$MainContent$txtUnitNumber\": self._unit_number,\n }\n if event_taget is not None:\n 
args[\"__EVENTTARGET\"] = event_taget\n\n for hidden_val in soup.find_all(\"input\", {\"type\": \"hidden\"}):\n args[hidden_val[\"name\"]] = hidden_val[\"value\"]\n\n for key, value in additional.items():\n args[key] = value\n return args\n\n def fetch(self):\n session = requests.Session()\n\n # get First page\n r = session.get(API_URL, verify=False)\n r.raise_for_status()\n\n # extractt arguments\n args = self.__set_args(\n BeautifulSoup(r.text, \"html.parser\"),\n event_taget=\"ctl00$MainContent$btnSearch\",\n )\n\n r = session.post(API_URL, data=args)\n r.raise_for_status()\n\n # get page to select an address\n soup = BeautifulSoup(r.text, \"html.parser\")\n\n selectable = soup.find_all(\"a\", {\"class\": \"anchor-button small\"}, text=\"Select\")\n\n if len(selectable) == 0:\n raise ValueError(\"No address found\")\n selected = selectable[0]\n\n # If multiple addresses are found, try to find the one that matches the input and warn if there are multiple or none matches\n if len(selectable) > 1:\n found = [\n \" \".join(\n [y.text for y in x.parent.parent.find_all(\"td\")[1].find_all(\"span\")]\n )\n for x in selectable\n ]\n using_index = 0\n\n match = False\n\n for index, entry in enumerate(found):\n entry = entry.lower().strip().replace(\" \", \"\")\n if (\n self._house_number.lower().strip().replace(\" \", \"\") in entry\n and self._street.lower().strip().replace(\" \", \"\") in entry\n and self._suburb.lower().strip().replace(\" \", \"\") in entry\n and self._unit_number.lower().strip().replace(\" \", \"\") in entry\n ):\n if match:\n LOGGER.warning(\n f\"Multiple addresses found, using first one \\nfound:{', '.join(found[:10])}{'...' if len(found) >= 10 else ''} \\nusing:{found[using_index]}\"\n )\n break\n using_index = index\n match = True\n if not match:\n LOGGER.warning(\n f\"no perfect address match found, using:{found[using_index]}\"\n )\n\n # request first address\n args = self.__set_args(\n soup,\n event_taget=\"ctl00$MainContent$gvPropertyResults$ctl02$btnSelect\",\n additional={selected[\"href\"].split(\"'\")[1]: \"\"},\n )\n r = session.post(API_URL, data=args)\n r.raise_for_status()\n\n soup = BeautifulSoup(r.text, \"html.parser\")\n cal_header = soup.find(\"th\", {\"class\": \"header-month\"}).find(\"span\").text\n\n from_month = cal_header.split(\"-\")[0].strip()\n to_month = cal_header.split(\"-\")[1].strip().split(\" \")[0]\n to_year = from_year = cal_header.split(\"-\")[1].strip().split(\" \")[1]\n # if main month contains a year, set it (maybe happens in december???)\n if len(from_month.split(\" \")) > 1:\n from_year = from_month.split(\" \")[1]\n from_month = from_month.split(\" \")[0]\n\n today_div = soup.find(\"table\", id=\"cal\").find(\"td\", class_=\"today\")\n print(today_div)\n\n # if other-month is to_month\n if (\n \"other-month\" in today_div.attrs\n and datetime.now().strftime(\"%B\") != to_month\n ):\n main_month, other_month = from_month, to_month\n main_year, other_year = from_year, to_year\n else: # if other-month is from_month\n main_month, other_month = to_month, from_month\n main_year, other_year = to_year, from_year\n\n entries = []\n\n calendar = soup.find(\"table\", {\"class\": \"collection-day-calendar\"})\n # Iterate over all days with pickups\n for pickup in calendar.find_all(\n \"div\", {\"class\": re.compile(r\"pickup|next-pickup\")}\n ):\n parent_td = pickup.parent\n month = (\n main_month if \"main-month\" in parent_td.attrs[\"class\"] else other_month\n )\n year = main_year if \"main-month\" in parent_td.attrs[\"class\"] else 
other_year\n day = parent_td.find(\"div\", {\"class\": \"daynumber\"}).text\n\n # Iterate over all pickup container types for this day\n for container in pickup.find_all(\"div\", {\"class\": \"bin-container\"}):\n container_type = \" \".join(container.find(\"div\").attrs[\"class\"])\n container_icon = ICON_MAP.get(container_type)\n\n date = datetime.strptime(f\"{year}-{month}-{day}\", \"%Y-%B-%d\").date()\n entries.append(\n Collection(date=date, t=container_type, icon=container_icon)\n )\n\n return entries\n", "path": "custom_components/waste_collection_schedule/waste_collection_schedule/source/portenf_sa_gov_au.py"}]} | 4,010 | 273 |
problem_id: gh_patches_debug_4863 | source: rasdani/github-patches | task_type: git_diff | in_source_id: digitalfabrik__integreat-cms-1210

prompt:

You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
PDF Export URL pattern
### Describe the Bug
The web app calls `/REGION/LANG/wp-json/ig-mpdf/v1/pdf` to export a PDF, which returns a 404. Our API currently uses `REGION/LANG/pdf`.
The normal mapping does not work, as we
### Steps to Reproduce
```shell
curl 'https://malte-test.tuerantuer.org/joerdenstorf/de/wp-json/ig-mpdf/v1/pdf'
```
### Expected Behavior
Map old URL pattern to new endpoint.
### Actual Behavior
404
</issue>
<code>
[start of integreat_cms/api/urls.py]
1 """
2 Expansion of API-Endpoints for the CMS
3 """
4 from django.urls import include, path, re_path
5
6 from .v3.events import events
7 from .v3.feedback import (
8 page_feedback,
9 search_result_feedback,
10 region_feedback,
11 offer_feedback,
12 offer_list_feedback,
13 event_list_feedback,
14 event_feedback,
15 poi_feedback,
16 map_feedback,
17 imprint_page_feedback,
18 legacy_feedback_endpoint,
19 )
20 from .v3.imprint import imprint
21 from .v3.languages import languages
22 from .v3.locations import locations
23 from .v3.pages import pages, children, parents, single_page
24 from .v3.pdf_export import pdf_export
25 from .v3.push_notifications import sent_push_notifications
26 from .v3.regions import regions, liveregions, hiddenregions
27 from .v3.offers import offers
28
29
30 #: The namespace for this URL config (see :attr:`django.urls.ResolverMatch.app_name`)
31 app_name = "api"
32
33 content_api_urlpatterns = [
34 path("pages/", pages, name="pages"),
35 path("locations/", locations, name="locations"),
36 path("events/", events, name="events"),
37 path("page/", single_page, name="single_page"),
38 path("post/", single_page, name="single_page"),
39 path("children/", children, name="children"),
40 path("parents/", parents, name="parents"),
41 path("pdf/", pdf_export, name="pdf_export"),
42 path(
43 "sent_push_notifications/",
44 sent_push_notifications,
45 name="sent_push_notifications",
46 ),
47 path("imprint/", imprint, name="imprint"),
48 path("disclaimer/", imprint, name="imprint"),
49 path("offers/", offers, name="offers"),
50 path("extras/", offers, name="offers"),
51 re_path(
52 r"^feedback/?$",
53 legacy_feedback_endpoint.legacy_feedback_endpoint,
54 name="legacy_feedback_endpoint",
55 ),
56 path(
57 "feedback/",
58 include(
59 [
60 re_path(
61 r"^categories/?$",
62 region_feedback.region_feedback,
63 name="region_feedback",
64 ),
65 re_path(r"^page/?$", page_feedback.page_feedback, name="page_feedback"),
66 re_path(r"^poi/?$", poi_feedback.poi_feedback, name="poi_feedback"),
67 re_path(
68 r"^event/?$", event_feedback.event_feedback, name="event_feedback"
69 ),
70 re_path(
71 r"^events/?$",
72 event_list_feedback.event_list_feedback,
73 name="event_list_feedback",
74 ),
75 re_path(
76 r"^imprint-page/?$",
77 imprint_page_feedback.imprint_page_feedback,
78 name="imprint_page_feedbacks",
79 ),
80 re_path(r"^map/?$", map_feedback.map_feedback, name="map_feedback"),
81 re_path(
82 r"^search/?$",
83 search_result_feedback.search_result_feedback,
84 name="search_result_feedback",
85 ),
86 re_path(
87 r"^offers/?$",
88 offer_list_feedback.offer_list_feedback,
89 name="offer_list_feedback",
90 ),
91 re_path(
92 r"^extras/?$",
93 offer_list_feedback.offer_list_feedback,
94 name="offer_list_feedback",
95 ),
96 re_path(
97 r"^offer/?$", offer_feedback.offer_feedback, name="offer_feedback"
98 ),
99 re_path(
100 r"^extra/?$", offer_feedback.offer_feedback, name="offer_feedback"
101 ),
102 ]
103 ),
104 ),
105 ]
106
107 region_api_urlpatterns = [
108 path("", regions, name="regions"),
109 path("live/", liveregions, name="regions_live"),
110 path("hidden/", hiddenregions, name="regions_hidden"),
111 ]
112
113 #: The url patterns of this module (see :doc:`topics/http/urls`)
114 urlpatterns = [
115 path("api/regions/", include(region_api_urlpatterns)),
116 path("wp-json/extensions/v3/sites/", include(region_api_urlpatterns)),
117 path(
118 "api/<slug:region_slug>/",
119 include(
120 [
121 path("languages/", languages, name="languages"),
122 path("offers/", offers, name="offers"),
123 path("extras/", offers, name="offers"),
124 path("<slug:language_slug>/", include(content_api_urlpatterns)),
125 ]
126 ),
127 ),
128 path(
129 "<slug:region_slug>/",
130 include(
131 [
132 path(
133 "de/wp-json/extensions/v3/languages/", languages, name="languages"
134 ),
135 path(
136 "<slug:language_slug>/wp-json/extensions/v3/",
137 include(content_api_urlpatterns),
138 ),
139 ]
140 ),
141 ),
142 ]
143
[end of integreat_cms/api/urls.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch>

golden_diff:

diff --git a/integreat_cms/api/urls.py b/integreat_cms/api/urls.py
--- a/integreat_cms/api/urls.py
+++ b/integreat_cms/api/urls.py
@@ -136,6 +136,11 @@
"<slug:language_slug>/wp-json/extensions/v3/",
include(content_api_urlpatterns),
),
+ path(
+ "<slug:language_slug>/wp-json/ig-mpdf/v1/pdf/",
+ pdf_export,
+ name="pdf_export",
+ ),
]
),
),
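
For context, a rough check of how the added pattern resolves the legacy path; this assumes a configured integreat-cms Django environment, and the slugs come from the issue's curl example:

```python
from django.urls import resolve

# Region/language slugs taken from the issue's reproduction URL.
match = resolve("/joerdenstorf/de/wp-json/ig-mpdf/v1/pdf/")
print(match.url_name)  # "pdf_export", per the pattern added above
```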
| {"golden_diff": "diff --git a/integreat_cms/api/urls.py b/integreat_cms/api/urls.py\n--- a/integreat_cms/api/urls.py\n+++ b/integreat_cms/api/urls.py\n@@ -136,6 +136,11 @@\n \"<slug:language_slug>/wp-json/extensions/v3/\",\n include(content_api_urlpatterns),\n ),\n+ path(\n+ \"<slug:language_slug>/wp-json/ig-mpdf/v1/pdf/\",\n+ pdf_export,\n+ name=\"pdf_export\",\n+ ),\n ]\n ),\n ),\n", "issue": "PDF Export URL pattern\n### Describe the Bug\r\nThe web app calls `/REGION/LANG/wp-json/ig-mpdf/v1/pdf` to export a PDF which returns a 404. Our API currently uses `REGION/LANG/pdf`.\r\n\r\nThe normal mapping does not work, as we\r\n\r\n### Steps to Reproduce\r\n\r\n```shell\r\ncurl 'https://malte-test.tuerantuer.org/joerdenstorf/de/wp-json/ig-mpdf/v1/pdf'\r\n```\r\n\r\n### Expected Behavior\r\nMap old URL pattern to new endpoint.\r\n\r\n\r\n### Actual Behavior\r\n404\n", "before_files": [{"content": "\"\"\"\nExpansion of API-Endpoints for the CMS\n\"\"\"\nfrom django.urls import include, path, re_path\n\nfrom .v3.events import events\nfrom .v3.feedback import (\n page_feedback,\n search_result_feedback,\n region_feedback,\n offer_feedback,\n offer_list_feedback,\n event_list_feedback,\n event_feedback,\n poi_feedback,\n map_feedback,\n imprint_page_feedback,\n legacy_feedback_endpoint,\n)\nfrom .v3.imprint import imprint\nfrom .v3.languages import languages\nfrom .v3.locations import locations\nfrom .v3.pages import pages, children, parents, single_page\nfrom .v3.pdf_export import pdf_export\nfrom .v3.push_notifications import sent_push_notifications\nfrom .v3.regions import regions, liveregions, hiddenregions\nfrom .v3.offers import offers\n\n\n#: The namespace for this URL config (see :attr:`django.urls.ResolverMatch.app_name`)\napp_name = \"api\"\n\ncontent_api_urlpatterns = [\n path(\"pages/\", pages, name=\"pages\"),\n path(\"locations/\", locations, name=\"locations\"),\n path(\"events/\", events, name=\"events\"),\n path(\"page/\", single_page, name=\"single_page\"),\n path(\"post/\", single_page, name=\"single_page\"),\n path(\"children/\", children, name=\"children\"),\n path(\"parents/\", parents, name=\"parents\"),\n path(\"pdf/\", pdf_export, name=\"pdf_export\"),\n path(\n \"sent_push_notifications/\",\n sent_push_notifications,\n name=\"sent_push_notifications\",\n ),\n path(\"imprint/\", imprint, name=\"imprint\"),\n path(\"disclaimer/\", imprint, name=\"imprint\"),\n path(\"offers/\", offers, name=\"offers\"),\n path(\"extras/\", offers, name=\"offers\"),\n re_path(\n r\"^feedback/?$\",\n legacy_feedback_endpoint.legacy_feedback_endpoint,\n name=\"legacy_feedback_endpoint\",\n ),\n path(\n \"feedback/\",\n include(\n [\n re_path(\n r\"^categories/?$\",\n region_feedback.region_feedback,\n name=\"region_feedback\",\n ),\n re_path(r\"^page/?$\", page_feedback.page_feedback, name=\"page_feedback\"),\n re_path(r\"^poi/?$\", poi_feedback.poi_feedback, name=\"poi_feedback\"),\n re_path(\n r\"^event/?$\", event_feedback.event_feedback, name=\"event_feedback\"\n ),\n re_path(\n r\"^events/?$\",\n event_list_feedback.event_list_feedback,\n name=\"event_list_feedback\",\n ),\n re_path(\n r\"^imprint-page/?$\",\n imprint_page_feedback.imprint_page_feedback,\n name=\"imprint_page_feedbacks\",\n ),\n re_path(r\"^map/?$\", map_feedback.map_feedback, name=\"map_feedback\"),\n re_path(\n r\"^search/?$\",\n search_result_feedback.search_result_feedback,\n name=\"search_result_feedback\",\n ),\n re_path(\n r\"^offers/?$\",\n offer_list_feedback.offer_list_feedback,\n 
name=\"offer_list_feedback\",\n ),\n re_path(\n r\"^extras/?$\",\n offer_list_feedback.offer_list_feedback,\n name=\"offer_list_feedback\",\n ),\n re_path(\n r\"^offer/?$\", offer_feedback.offer_feedback, name=\"offer_feedback\"\n ),\n re_path(\n r\"^extra/?$\", offer_feedback.offer_feedback, name=\"offer_feedback\"\n ),\n ]\n ),\n ),\n]\n\nregion_api_urlpatterns = [\n path(\"\", regions, name=\"regions\"),\n path(\"live/\", liveregions, name=\"regions_live\"),\n path(\"hidden/\", hiddenregions, name=\"regions_hidden\"),\n]\n\n#: The url patterns of this module (see :doc:`topics/http/urls`)\nurlpatterns = [\n path(\"api/regions/\", include(region_api_urlpatterns)),\n path(\"wp-json/extensions/v3/sites/\", include(region_api_urlpatterns)),\n path(\n \"api/<slug:region_slug>/\",\n include(\n [\n path(\"languages/\", languages, name=\"languages\"),\n path(\"offers/\", offers, name=\"offers\"),\n path(\"extras/\", offers, name=\"offers\"),\n path(\"<slug:language_slug>/\", include(content_api_urlpatterns)),\n ]\n ),\n ),\n path(\n \"<slug:region_slug>/\",\n include(\n [\n path(\n \"de/wp-json/extensions/v3/languages/\", languages, name=\"languages\"\n ),\n path(\n \"<slug:language_slug>/wp-json/extensions/v3/\",\n include(content_api_urlpatterns),\n ),\n ]\n ),\n ),\n]\n", "path": "integreat_cms/api/urls.py"}]} | 1,934 | 129 |
problem_id: gh_patches_debug_29434 | source: rasdani/github-patches | task_type: git_diff | in_source_id: plone__Products.CMFPlone-1515

prompt:

You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Resources from third party add-ons are not being included in compiled plone-legacy bundle
It seems JS resources registered in Plone 5 using the old approach (`jsregistry.xml`) are not included in the final compilation: I installed an add-on and, even though I can see the JS resources listed in `default.js`, the source code is not present.
If I enable development mode, then I can see the source code included in `plone-legacy-compiled.js`, and it is executed normally.
</issue>
<code>
[start of Products/CMFPlone/resources/browser/combine.py]
1 from zExceptions import NotFound
2 from Acquisition import aq_base
3 from datetime import datetime
4 from plone.registry.interfaces import IRegistry
5 from plone.resource.file import FilesystemFile
6 from plone.resource.interfaces import IResourceDirectory
7 from Products.CMFPlone.interfaces import IBundleRegistry
8 from Products.CMFPlone.interfaces.resources import (
9 OVERRIDE_RESOURCE_DIRECTORY_NAME,
10 )
11 from StringIO import StringIO
12 from zope.component import getUtility
13 from zope.component import queryUtility
14
15 PRODUCTION_RESOURCE_DIRECTORY = "production"
16
17
18 def get_production_resource_directory():
19 persistent_directory = queryUtility(IResourceDirectory, name="persistent")
20 if persistent_directory is None:
21 return ''
22 container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]
23 try:
24 production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]
25 except NotFound:
26 return "%s/++unique++1" % PRODUCTION_RESOURCE_DIRECTORY
27 timestamp = production_folder.readFile('timestamp.txt')
28 return "%s/++unique++%s" % (
29 PRODUCTION_RESOURCE_DIRECTORY, timestamp)
30
31
32 def get_resource(context, path):
33 resource = context.unrestrictedTraverse(path)
34 if isinstance(resource, FilesystemFile):
35 (directory, sep, filename) = path.rpartition('/')
36 return context.unrestrictedTraverse(directory).readFile(filename)
37 else:
38 if hasattr(aq_base(resource), 'GET'):
39 # for FileResource
40 return resource.GET()
41 else:
42 # any BrowserView
43 return resource()
44
45
46 def write_js(context, folder, meta_bundle):
47 registry = getUtility(IRegistry)
48 resources = []
49
50 # default resources
51 if meta_bundle == 'default' and registry.records.get(
52 'plone.resources/jquery.js'
53 ):
54 resources.append(get_resource(context,
55 registry.records['plone.resources/jquery.js'].value))
56 resources.append(get_resource(context,
57 registry.records['plone.resources.requirejs'].value))
58 resources.append(get_resource(context,
59 registry.records['plone.resources.configjs'].value))
60
61 # bundles
62 bundles = registry.collectionOfInterface(
63 IBundleRegistry, prefix="plone.bundles", check=False)
64 for bundle in bundles.values():
65 if bundle.merge_with == meta_bundle:
66 resources.append(get_resource(context, bundle.jscompilation))
67
68 fi = StringIO()
69 for script in resources:
70 fi.write(script + '\n')
71 folder.writeFile(meta_bundle + ".js", fi)
72
73
74 def write_css(context, folder, meta_bundle):
75 registry = getUtility(IRegistry)
76 resources = []
77
78 bundles = registry.collectionOfInterface(
79 IBundleRegistry, prefix="plone.bundles", check=False)
80 for bundle in bundles.values():
81 if bundle.merge_with == meta_bundle:
82 resources.append(get_resource(context, bundle.csscompilation))
83
84 fi = StringIO()
85 for script in resources:
86 fi.write(script + '\n')
87 folder.writeFile(meta_bundle + ".css", fi)
88
89
90 def combine_bundles(context):
91 persistent_directory = queryUtility(IResourceDirectory, name="persistent")
92 if persistent_directory is None:
93 return
94 if OVERRIDE_RESOURCE_DIRECTORY_NAME not in persistent_directory:
95 persistent_directory.makeDirectory(OVERRIDE_RESOURCE_DIRECTORY_NAME)
96 container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]
97 if PRODUCTION_RESOURCE_DIRECTORY not in container:
98 container.makeDirectory(PRODUCTION_RESOURCE_DIRECTORY)
99 production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]
100
101 # store timestamp
102 fi = StringIO()
103 fi.write(datetime.now().isoformat())
104 production_folder.writeFile("timestamp.txt", fi)
105
106 # generate new combined bundles
107 write_js(context, production_folder, 'default')
108 write_js(context, production_folder, 'logged-in')
109 write_css(context, production_folder, 'default')
110 write_css(context, production_folder, 'logged-in')
111
[end of Products/CMFPlone/resources/browser/combine.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/Products/CMFPlone/resources/browser/combine.py b/Products/CMFPlone/resources/browser/combine.py
--- a/Products/CMFPlone/resources/browser/combine.py
+++ b/Products/CMFPlone/resources/browser/combine.py
@@ -30,6 +30,14 @@
def get_resource(context, path):
+ if path.startswith('++plone++'):
+ # ++plone++ resources can be customized, we return their override
+ # value if any
+ overrides = get_override_directory(context)
+ filepath = path[9:]
+ if overrides.isFile(filepath):
+ return overrides.readFile(filepath)
+
resource = context.unrestrictedTraverse(path)
if isinstance(resource, FilesystemFile):
(directory, sep, filename) = path.rpartition('/')
@@ -87,13 +95,17 @@
folder.writeFile(meta_bundle + ".css", fi)
-def combine_bundles(context):
+def get_override_directory(context):
persistent_directory = queryUtility(IResourceDirectory, name="persistent")
if persistent_directory is None:
return
if OVERRIDE_RESOURCE_DIRECTORY_NAME not in persistent_directory:
persistent_directory.makeDirectory(OVERRIDE_RESOURCE_DIRECTORY_NAME)
- container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]
+ return persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]
+
+
+def combine_bundles(context):
+ container = get_override_directory(context)
if PRODUCTION_RESOURCE_DIRECTORY not in container:
container.makeDirectory(PRODUCTION_RESOURCE_DIRECTORY)
production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]
| {"golden_diff": "diff --git a/Products/CMFPlone/resources/browser/combine.py b/Products/CMFPlone/resources/browser/combine.py\n--- a/Products/CMFPlone/resources/browser/combine.py\n+++ b/Products/CMFPlone/resources/browser/combine.py\n@@ -30,6 +30,14 @@\n \n \n def get_resource(context, path):\n+ if path.startswith('++plone++'):\n+ # ++plone++ resources can be customized, we return their override\n+ # value if any\n+ overrides = get_override_directory(context)\n+ filepath = path[9:]\n+ if overrides.isFile(filepath):\n+ return overrides.readFile(filepath)\n+\n resource = context.unrestrictedTraverse(path)\n if isinstance(resource, FilesystemFile):\n (directory, sep, filename) = path.rpartition('/')\n@@ -87,13 +95,17 @@\n folder.writeFile(meta_bundle + \".css\", fi)\n \n \n-def combine_bundles(context):\n+def get_override_directory(context):\n persistent_directory = queryUtility(IResourceDirectory, name=\"persistent\")\n if persistent_directory is None:\n return\n if OVERRIDE_RESOURCE_DIRECTORY_NAME not in persistent_directory:\n persistent_directory.makeDirectory(OVERRIDE_RESOURCE_DIRECTORY_NAME)\n- container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]\n+ return persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]\n+\n+\n+def combine_bundles(context):\n+ container = get_override_directory(context)\n if PRODUCTION_RESOURCE_DIRECTORY not in container:\n container.makeDirectory(PRODUCTION_RESOURCE_DIRECTORY)\n production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]\n", "issue": "Resources from third party add-ons are not being included in compiled plone-legacy bundle\nSeems JS resources registered in Plone 5 using old approach (`jsregistry.xml`) are not included in the final compilation: I installed an add-on and, even as I can see the JS resources listed in `default.js`, the source code is not present.\n\nIf I enable development mode, then I can see the source code included in `plone-legacy-compiled.js` and it's executed normally.\n\n", "before_files": [{"content": "from zExceptions import NotFound\nfrom Acquisition import aq_base\nfrom datetime import datetime\nfrom plone.registry.interfaces import IRegistry\nfrom plone.resource.file import FilesystemFile\nfrom plone.resource.interfaces import IResourceDirectory\nfrom Products.CMFPlone.interfaces import IBundleRegistry\nfrom Products.CMFPlone.interfaces.resources import (\n OVERRIDE_RESOURCE_DIRECTORY_NAME,\n)\nfrom StringIO import StringIO\nfrom zope.component import getUtility\nfrom zope.component import queryUtility\n\nPRODUCTION_RESOURCE_DIRECTORY = \"production\"\n\n\ndef get_production_resource_directory():\n persistent_directory = queryUtility(IResourceDirectory, name=\"persistent\")\n if persistent_directory is None:\n return ''\n container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]\n try:\n production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]\n except NotFound:\n return \"%s/++unique++1\" % PRODUCTION_RESOURCE_DIRECTORY\n timestamp = production_folder.readFile('timestamp.txt')\n return \"%s/++unique++%s\" % (\n PRODUCTION_RESOURCE_DIRECTORY, timestamp)\n\n\ndef get_resource(context, path):\n resource = context.unrestrictedTraverse(path)\n if isinstance(resource, FilesystemFile):\n (directory, sep, filename) = path.rpartition('/')\n return context.unrestrictedTraverse(directory).readFile(filename)\n else:\n if hasattr(aq_base(resource), 'GET'):\n # for FileResource\n return resource.GET()\n else:\n # any BrowserView\n return resource()\n\n\ndef write_js(context, folder, meta_bundle):\n 
registry = getUtility(IRegistry)\n resources = []\n\n # default resources\n if meta_bundle == 'default' and registry.records.get(\n 'plone.resources/jquery.js'\n ):\n resources.append(get_resource(context,\n registry.records['plone.resources/jquery.js'].value))\n resources.append(get_resource(context,\n registry.records['plone.resources.requirejs'].value))\n resources.append(get_resource(context,\n registry.records['plone.resources.configjs'].value))\n\n # bundles\n bundles = registry.collectionOfInterface(\n IBundleRegistry, prefix=\"plone.bundles\", check=False)\n for bundle in bundles.values():\n if bundle.merge_with == meta_bundle:\n resources.append(get_resource(context, bundle.jscompilation))\n\n fi = StringIO()\n for script in resources:\n fi.write(script + '\\n')\n folder.writeFile(meta_bundle + \".js\", fi)\n\n\ndef write_css(context, folder, meta_bundle):\n registry = getUtility(IRegistry)\n resources = []\n\n bundles = registry.collectionOfInterface(\n IBundleRegistry, prefix=\"plone.bundles\", check=False)\n for bundle in bundles.values():\n if bundle.merge_with == meta_bundle:\n resources.append(get_resource(context, bundle.csscompilation))\n\n fi = StringIO()\n for script in resources:\n fi.write(script + '\\n')\n folder.writeFile(meta_bundle + \".css\", fi)\n\n\ndef combine_bundles(context):\n persistent_directory = queryUtility(IResourceDirectory, name=\"persistent\")\n if persistent_directory is None:\n return\n if OVERRIDE_RESOURCE_DIRECTORY_NAME not in persistent_directory:\n persistent_directory.makeDirectory(OVERRIDE_RESOURCE_DIRECTORY_NAME)\n container = persistent_directory[OVERRIDE_RESOURCE_DIRECTORY_NAME]\n if PRODUCTION_RESOURCE_DIRECTORY not in container:\n container.makeDirectory(PRODUCTION_RESOURCE_DIRECTORY)\n production_folder = container[PRODUCTION_RESOURCE_DIRECTORY]\n\n # store timestamp\n fi = StringIO()\n fi.write(datetime.now().isoformat())\n production_folder.writeFile(\"timestamp.txt\", fi)\n\n # generate new combined bundles\n write_js(context, production_folder, 'default')\n write_js(context, production_folder, 'logged-in')\n write_css(context, production_folder, 'default')\n write_css(context, production_folder, 'logged-in')\n", "path": "Products/CMFPlone/resources/browser/combine.py"}]} | 1,665 | 338 |
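For illustration, a minimal standalone sketch of the override-lookup branch this fix adds to `get_resource`; `FakeDirectory` is a hypothetical stand-in for plone.resource's persistent override directory (`isFile`/`readFile`) and is not part of the repository:

```python
# Sketch only: mimics the patched get_resource() override branch.
class FakeDirectory:
    """Hypothetical stand-in for plone.resource's override directory."""
    def __init__(self, files):
        self._files = files

    def isFile(self, path):
        return path in self._files

    def readFile(self, path):
        return self._files[path]


def get_resource_sketch(overrides, path, fallback):
    if path.startswith('++plone++'):
        # ++plone++ resources can be customized; return the override if any.
        filepath = path[9:]  # strip the '++plone++' prefix, as in the diff
        if overrides.isFile(filepath):
            return overrides.readFile(filepath)
    return fallback(path)


overrides = FakeDirectory({'static/plone.js': b'customized'})
assert get_resource_sketch(
    overrides, '++plone++static/plone.js', lambda p: b'original') == b'customized'
assert get_resource_sketch(
    overrides, 'portal_skins/plone.js', lambda p: b'original') == b'original'
```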
gh_patches_debug_22011 | rasdani/github-patches | git_diff | docker__docker-py-1330 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add docker network IPAM options parameter
The IPAM driver is missing options support.
The API supports an options field in the IPAM config.
It was introduced in API v1.22.
```
POST /networks/create Now supports an options field in the IPAM config that provides options for custom IPAM plugins.
```
</issue>
<code>
[start of docker/types/networks.py]
1 from .. import errors
2 from ..utils import normalize_links, version_lt
3
4
5 class EndpointConfig(dict):
6 def __init__(self, version, aliases=None, links=None, ipv4_address=None,
7 ipv6_address=None, link_local_ips=None):
8 if version_lt(version, '1.22'):
9 raise errors.InvalidVersion(
10 'Endpoint config is not supported for API version < 1.22'
11 )
12
13 if aliases:
14 self["Aliases"] = aliases
15
16 if links:
17 self["Links"] = normalize_links(links)
18
19 ipam_config = {}
20 if ipv4_address:
21 ipam_config['IPv4Address'] = ipv4_address
22
23 if ipv6_address:
24 ipam_config['IPv6Address'] = ipv6_address
25
26 if link_local_ips is not None:
27 if version_lt(version, '1.24'):
28 raise errors.InvalidVersion(
29 'link_local_ips is not supported for API version < 1.24'
30 )
31 ipam_config['LinkLocalIPs'] = link_local_ips
32
33 if ipam_config:
34 self['IPAMConfig'] = ipam_config
35
36
37 class NetworkingConfig(dict):
38 def __init__(self, endpoints_config=None):
39 if endpoints_config:
40 self["EndpointsConfig"] = endpoints_config
41
42
43 class IPAMConfig(dict):
44 """
45 Create an IPAM (IP Address Management) config dictionary to be used with
46 :py:meth:`~docker.api.network.NetworkApiMixin.create_network`.
47
48 Args:
49
50 driver (str): The IPAM driver to use. Defaults to ``default``.
51 pool_configs (list): A list of pool configurations
52 (:py:class:`~docker.types.IPAMPool`). Defaults to empty list.
53
54 Example:
55
56 >>> ipam_config = docker.types.IPAMConfig(driver='default')
57 >>> network = client.create_network('network1', ipam=ipam_config)
58
59 """
60 def __init__(self, driver='default', pool_configs=None):
61 self.update({
62 'Driver': driver,
63 'Config': pool_configs or []
64 })
65
66
67 class IPAMPool(dict):
68 """
69 Create an IPAM pool config dictionary to be added to the
70 ``pool_configs`` parameter of
71 :py:class:`~docker.types.IPAMConfig`.
72
73 Args:
74
75 subnet (str): Custom subnet for this IPAM pool using the CIDR
76 notation. Defaults to ``None``.
77 iprange (str): Custom IP range for endpoints in this IPAM pool using
78 the CIDR notation. Defaults to ``None``.
79 gateway (str): Custom IP address for the pool's gateway.
80 aux_addresses (dict): A dictionary of ``key -> ip_address``
81 relationships specifying auxiliary addresses that need to be
82 allocated by the IPAM driver.
83
84 Example:
85
86 >>> ipam_pool = docker.types.IPAMPool(
87 subnet='124.42.0.0/16',
88 iprange='124.42.0.0/24',
89 gateway='124.42.0.254',
90 aux_addresses={
91 'reserved1': '124.42.1.1'
92 }
93 )
94 >>> ipam_config = docker.types.IPAMConfig(
95 pool_configs=[ipam_pool])
96 """
97 def __init__(self, subnet=None, iprange=None, gateway=None,
98 aux_addresses=None):
99 self.update({
100 'Subnet': subnet,
101 'IPRange': iprange,
102 'Gateway': gateway,
103 'AuxiliaryAddresses': aux_addresses
104 })
105
[end of docker/types/networks.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/docker/types/networks.py b/docker/types/networks.py
--- a/docker/types/networks.py
+++ b/docker/types/networks.py
@@ -50,6 +50,8 @@
driver (str): The IPAM driver to use. Defaults to ``default``.
pool_configs (list): A list of pool configurations
(:py:class:`~docker.types.IPAMPool`). Defaults to empty list.
+ options (dict): Driver options as a key-value dictionary.
+ Defaults to `None`.
Example:
@@ -57,12 +59,17 @@
>>> network = client.create_network('network1', ipam=ipam_config)
"""
- def __init__(self, driver='default', pool_configs=None):
+ def __init__(self, driver='default', pool_configs=None, options=None):
self.update({
'Driver': driver,
'Config': pool_configs or []
})
+ if options:
+ if not isinstance(options, dict):
+ raise TypeError('IPAMConfig options must be a dictionary')
+ self['Options'] = options
+
class IPAMPool(dict):
"""
| {"golden_diff": "diff --git a/docker/types/networks.py b/docker/types/networks.py\n--- a/docker/types/networks.py\n+++ b/docker/types/networks.py\n@@ -50,6 +50,8 @@\n driver (str): The IPAM driver to use. Defaults to ``default``.\n pool_configs (list): A list of pool configurations\n (:py:class:`~docker.types.IPAMPool`). Defaults to empty list.\n+ options (dict): Driver options as a key-value dictionary.\n+ Defaults to `None`.\n \n Example:\n \n@@ -57,12 +59,17 @@\n >>> network = client.create_network('network1', ipam=ipam_config)\n \n \"\"\"\n- def __init__(self, driver='default', pool_configs=None):\n+ def __init__(self, driver='default', pool_configs=None, options=None):\n self.update({\n 'Driver': driver,\n 'Config': pool_configs or []\n })\n \n+ if options:\n+ if not isinstance(options, dict):\n+ raise TypeError('IPAMConfig options must be a dictionary')\n+ self['Options'] = options\n+\n \n class IPAMPool(dict):\n \"\"\"\n", "issue": "Add docker network IPAM options parameter\nIPAM driver missing options\n\nsupports an options field in the IPAM config \nIt introduced in API v1.22.\n\n```\nPOST /networks/create Now supports an options field in the IPAM config that provides options for custom IPAM plugins.\n```\n\n", "before_files": [{"content": "from .. import errors\nfrom ..utils import normalize_links, version_lt\n\n\nclass EndpointConfig(dict):\n def __init__(self, version, aliases=None, links=None, ipv4_address=None,\n ipv6_address=None, link_local_ips=None):\n if version_lt(version, '1.22'):\n raise errors.InvalidVersion(\n 'Endpoint config is not supported for API version < 1.22'\n )\n\n if aliases:\n self[\"Aliases\"] = aliases\n\n if links:\n self[\"Links\"] = normalize_links(links)\n\n ipam_config = {}\n if ipv4_address:\n ipam_config['IPv4Address'] = ipv4_address\n\n if ipv6_address:\n ipam_config['IPv6Address'] = ipv6_address\n\n if link_local_ips is not None:\n if version_lt(version, '1.24'):\n raise errors.InvalidVersion(\n 'link_local_ips is not supported for API version < 1.24'\n )\n ipam_config['LinkLocalIPs'] = link_local_ips\n\n if ipam_config:\n self['IPAMConfig'] = ipam_config\n\n\nclass NetworkingConfig(dict):\n def __init__(self, endpoints_config=None):\n if endpoints_config:\n self[\"EndpointsConfig\"] = endpoints_config\n\n\nclass IPAMConfig(dict):\n \"\"\"\n Create an IPAM (IP Address Management) config dictionary to be used with\n :py:meth:`~docker.api.network.NetworkApiMixin.create_network`.\n\n Args:\n\n driver (str): The IPAM driver to use. Defaults to ``default``.\n pool_configs (list): A list of pool configurations\n (:py:class:`~docker.types.IPAMPool`). Defaults to empty list.\n\n Example:\n\n >>> ipam_config = docker.types.IPAMConfig(driver='default')\n >>> network = client.create_network('network1', ipam=ipam_config)\n\n \"\"\"\n def __init__(self, driver='default', pool_configs=None):\n self.update({\n 'Driver': driver,\n 'Config': pool_configs or []\n })\n\n\nclass IPAMPool(dict):\n \"\"\"\n Create an IPAM pool config dictionary to be added to the\n ``pool_configs`` parameter of\n :py:class:`~docker.types.IPAMConfig`.\n\n Args:\n\n subnet (str): Custom subnet for this IPAM pool using the CIDR\n notation. Defaults to ``None``.\n iprange (str): Custom IP range for endpoints in this IPAM pool using\n the CIDR notation. 
Defaults to ``None``.\n gateway (str): Custom IP address for the pool's gateway.\n aux_addresses (dict): A dictionary of ``key -> ip_address``\n relationships specifying auxiliary addresses that need to be\n allocated by the IPAM driver.\n\n Example:\n\n >>> ipam_pool = docker.types.IPAMPool(\n subnet='124.42.0.0/16',\n iprange='124.42.0.0/24',\n gateway='124.42.0.254',\n aux_addresses={\n 'reserved1': '124.42.1.1'\n }\n )\n >>> ipam_config = docker.types.IPAMConfig(\n pool_configs=[ipam_pool])\n \"\"\"\n def __init__(self, subnet=None, iprange=None, gateway=None,\n aux_addresses=None):\n self.update({\n 'Subnet': subnet,\n 'IPRange': iprange,\n 'Gateway': gateway,\n 'AuxiliaryAddresses': aux_addresses\n })\n", "path": "docker/types/networks.py"}]} | 1,578 | 254 |
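For illustration, a self-contained re-implementation of the patched `IPAMConfig` behavior; the real class lives in `docker/types/networks.py`, and the `'subnet-size'` option key below is only an example value:

```python
# Sketch only: mirrors the dict-subclass behavior added by the diff.
class IPAMConfig(dict):
    def __init__(self, driver='default', pool_configs=None, options=None):
        self.update({'Driver': driver, 'Config': pool_configs or []})
        if options:
            if not isinstance(options, dict):
                raise TypeError('IPAMConfig options must be a dictionary')
            self['Options'] = options


cfg = IPAMConfig(options={'subnet-size': '24'})
assert cfg['Options'] == {'subnet-size': '24'}

try:
    IPAMConfig(options=['not-a-dict'])
except TypeError as exc:
    assert 'dictionary' in str(exc)
```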
gh_patches_debug_2452 | rasdani/github-patches | git_diff | pyinstaller__pyinstaller-2225 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
missing hidden import for skimage
When packaging an application that imports skimage.feature (and nothing else), the app would not run due to an ImportError on the "transform" module. This can be fixed by adding one item to the hiddenimports in the hook-skimage.transform.py file (bolded below):
> hiddenimports = ['skimage.draw.draw',
> 'skimage._shared.geometry',
> 'skimage.filters.rank.core_cy',
> **'skimage._shared.transform'**]
>
> datas = collect_data_files('skimage')
PyInstaller 3.2, Windows 7 64 bit, Python 2.7.12, Anaconda 4.1.1 distribution.
</issue>
<code>
[start of PyInstaller/hooks/hook-skimage.transform.py]
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2014-2016, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License with exception
5 # for distributing bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9 from PyInstaller.utils.hooks import collect_data_files
10
11 # Hook tested with scikit-image (skimage) 0.9.3 on Mac OS 10.9 and Windows 7
12 # 64-bit
13 hiddenimports = ['skimage.draw.draw',
14 'skimage._shared.geometry',
15 'skimage.filters.rank.core_cy']
16
17 datas = collect_data_files('skimage')
18
[end of PyInstaller/hooks/hook-skimage.transform.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/PyInstaller/hooks/hook-skimage.transform.py b/PyInstaller/hooks/hook-skimage.transform.py
--- a/PyInstaller/hooks/hook-skimage.transform.py
+++ b/PyInstaller/hooks/hook-skimage.transform.py
@@ -12,6 +12,7 @@
# 64-bit
hiddenimports = ['skimage.draw.draw',
'skimage._shared.geometry',
+ 'skimage._shared.transform',
'skimage.filters.rank.core_cy']
datas = collect_data_files('skimage')
| {"golden_diff": "diff --git a/PyInstaller/hooks/hook-skimage.transform.py b/PyInstaller/hooks/hook-skimage.transform.py\n--- a/PyInstaller/hooks/hook-skimage.transform.py\n+++ b/PyInstaller/hooks/hook-skimage.transform.py\n@@ -12,6 +12,7 @@\n # 64-bit\n hiddenimports = ['skimage.draw.draw',\n 'skimage._shared.geometry',\n+ 'skimage._shared.transform',\n 'skimage.filters.rank.core_cy']\n \n datas = collect_data_files('skimage')\n", "issue": "missing hidden import for skimage\nWhen packaging an application that imports skimage.feature (and nothing else), the app would not run due to an ImportError on the \"transform\" module. This can be fixed by adding one item to the hiddenimports in hook-skimage.transform.py file (bolded below):\n\n> hiddenimports = ['skimage.draw.draw',\n> 'skimage._shared.geometry',\n> 'skimage.filters.rank.core_cy',\n> **'skimage._shared.transform'**] \n> \n> datas = collect_data_files('skimage')\n\nPyInstaller 3.2, Windows 7 64 bit, Python 2.7.12, Anaconda 4.1.1 distribution.\n\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2014-2016, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\nfrom PyInstaller.utils.hooks import collect_data_files\n\n# Hook tested with scikit-image (skimage) 0.9.3 on Mac OS 10.9 and Windows 7\n# 64-bit\nhiddenimports = ['skimage.draw.draw',\n 'skimage._shared.geometry',\n 'skimage.filters.rank.core_cy']\n\ndatas = collect_data_files('skimage')\n", "path": "PyInstaller/hooks/hook-skimage.transform.py"}]} | 868 | 116 |
gh_patches_debug_7034 | rasdani/github-patches | git_diff | aws__aws-cli-5019 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add support for PyYAML 5.3
Closes: https://github.com/aws/aws-cli/issues/4828
Signed-off-by: Igor Raits <[email protected]>
*Issue #, if available:*
*Description of changes:*
By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice.
</issue>
<code>
[start of setup.py]
1 #!/usr/bin/env python
2 import codecs
3 import os.path
4 import re
5 import sys
6
7 from setuptools import setup, find_packages
8
9
10 here = os.path.abspath(os.path.dirname(__file__))
11
12
13 def read(*parts):
14 return codecs.open(os.path.join(here, *parts), 'r').read()
15
16
17 def find_version(*file_paths):
18 version_file = read(*file_paths)
19 version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
20 version_file, re.M)
21 if version_match:
22 return version_match.group(1)
23 raise RuntimeError("Unable to find version string.")
24
25
26 install_requires = [
27 'botocore==1.15.10',
28 'docutils>=0.10,<0.16',
29 'rsa>=3.1.2,<=3.5.0',
30 's3transfer>=0.3.0,<0.4.0',
31 'PyYAML>=3.10,<5.3',
32 ]
33
34
35 if sys.version_info[:2] == (3, 4):
36 install_requires.append('colorama>=0.2.5,<0.4.2')
37 else:
38 install_requires.append('colorama>=0.2.5,<0.4.4')
39
40
41 setup_options = dict(
42 name='awscli',
43 version=find_version("awscli", "__init__.py"),
44 description='Universal Command Line Environment for AWS.',
45 long_description=read('README.rst'),
46 author='Amazon Web Services',
47 url='http://aws.amazon.com/cli/',
48 scripts=['bin/aws', 'bin/aws.cmd',
49 'bin/aws_completer', 'bin/aws_zsh_completer.sh',
50 'bin/aws_bash_completer'],
51 packages=find_packages(exclude=['tests*']),
52 package_data={'awscli': ['data/*.json', 'examples/*/*.rst',
53 'examples/*/*.txt', 'examples/*/*/*.txt',
54 'examples/*/*/*.rst', 'topics/*.rst',
55 'topics/*.json']},
56 install_requires=install_requires,
57 extras_require={},
58 license="Apache License 2.0",
59 classifiers=[
60 'Development Status :: 5 - Production/Stable',
61 'Intended Audience :: Developers',
62 'Intended Audience :: System Administrators',
63 'Natural Language :: English',
64 'License :: OSI Approved :: Apache Software License',
65 'Programming Language :: Python',
66 'Programming Language :: Python :: 2',
67 'Programming Language :: Python :: 2.7',
68 'Programming Language :: Python :: 3',
69 'Programming Language :: Python :: 3.4',
70 'Programming Language :: Python :: 3.5',
71 'Programming Language :: Python :: 3.6',
72 'Programming Language :: Python :: 3.7',
73 'Programming Language :: Python :: 3.8',
74 ],
75 )
76
77
78 if 'py2exe' in sys.argv:
79 # This will actually give us a py2exe command.
80 import py2exe
81 # And we have some py2exe specific options.
82 setup_options['options'] = {
83 'py2exe': {
84 'optimize': 0,
85 'skip_archive': True,
86 'dll_excludes': ['crypt32.dll'],
87 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',
88 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],
89 }
90 }
91 setup_options['console'] = ['bin/aws']
92
93
94 setup(**setup_options)
95
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -28,13 +28,14 @@
'docutils>=0.10,<0.16',
'rsa>=3.1.2,<=3.5.0',
's3transfer>=0.3.0,<0.4.0',
- 'PyYAML>=3.10,<5.3',
]
if sys.version_info[:2] == (3, 4):
+ install_requires.append('PyYAML>=3.10,<5.3')
install_requires.append('colorama>=0.2.5,<0.4.2')
else:
+ install_requires.append('PyYAML>=3.10,<5.4')
install_requires.append('colorama>=0.2.5,<0.4.4')
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -28,13 +28,14 @@\n 'docutils>=0.10,<0.16',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.3.0,<0.4.0',\n- 'PyYAML>=3.10,<5.3',\n ]\n \n \n if sys.version_info[:2] == (3, 4):\n+ install_requires.append('PyYAML>=3.10,<5.3')\n install_requires.append('colorama>=0.2.5,<0.4.2')\n else:\n+ install_requires.append('PyYAML>=3.10,<5.4')\n install_requires.append('colorama>=0.2.5,<0.4.4')\n", "issue": "Add support for PyYAML 5.3\nCloses: https://github.com/aws/aws-cli/issues/4828\r\nSigned-off-by: Igor Raits <[email protected]>\r\n\r\n*Issue #, if available:*\r\n\r\n*Description of changes:*\r\n\r\n\r\nBy submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\ninstall_requires = [\n 'botocore==1.15.10',\n 'docutils>=0.10,<0.16',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.3.0,<0.4.0',\n 'PyYAML>=3.10,<5.3',\n]\n\n\nif sys.version_info[:2] == (3, 4):\n install_requires.append('colorama>=0.2.5,<0.4.2')\nelse:\n install_requires.append('colorama>=0.2.5,<0.4.4')\n\n\nsetup_options = dict(\n name='awscli',\n version=find_version(\"awscli\", \"__init__.py\"),\n description='Universal Command Line Environment for AWS.',\n long_description=read('README.rst'),\n author='Amazon Web Services',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh',\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n 'examples/*/*.txt', 'examples/*/*/*.txt',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=install_requires,\n extras_require={},\n license=\"Apache License 2.0\",\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n ],\n)\n\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': {\n 'optimize': 0,\n 'skip_archive': True,\n 'dll_excludes': ['crypt32.dll'],\n 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',\n 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],\n }\n }\n setup_options['console'] = ['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}]} | 1,561 | 196 |
gh_patches_debug_18897 | rasdani/github-patches | git_diff | quantumlib__Cirq-1897 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Handedness between ISwapPowGate and FSim is different.
The ISwapPowGate and FSim gate rotate in different directions on the |01>, |10> subspace. This feels a bit odd to me.
</issue>
<code>
[start of cirq/ops/fsim_gate.py]
1 # Copyright 2019 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """Defines the fermionic simulation gate family.
15
16 This is the family of two-qubit gates that preserve excitations (number of ON
17 qubits), ignoring single-qubit gates and global phase. For example, when using
18 the second quantized representation of electrons to simulate chemistry, this is
19 a natural gateset because each ON qubit corresponds to an electron and in the
20 context of chemistry the electron count is conserved over time. This property
21 applies more generally to fermions, thus the name of the gate.
22 """
23
24 import cmath
25 import math
26 from typing import Optional
27
28 import numpy as np
29
30 import cirq
31 from cirq import protocols, value
32 from cirq._compat import proper_repr
33 from cirq.ops import gate_features
34
35
36 @value.value_equality(approximate=True)
37 class FSimGate(gate_features.TwoQubitGate,
38 gate_features.InterchangeableQubitsGate):
39 """Fermionic simulation gate family.
40
41 Contains all two qubit interactions that preserve excitations, up to
42 single-qubit rotations and global phase.
43
44 The unitary matrix of this gate is:
45
46 [[1, 0, 0, 0],
47 [0, a, b, 0],
48 [0, b, a, 0],
49 [0, 0, 0, c]]
50
51 where:
52
53 a = cos(theta)
54 b = -i·sin(theta)
55 c = exp(-i·phi)
56 """
57
58 def __init__(self, theta: float, phi: float):
59 """
60 Args:
61 theta: The strength of the XX+YY interaction, in radians.
62 phi: CPhase angle in radians. Determines how much the |11⟩ state is
63 phased.
64 """
65 self.theta = theta
66 self.phi = phi
67
68 def _value_equality_values_(self):
69 return self.theta, self.phi
70
71 def _is_parameterized_(self):
72 return cirq.is_parameterized(self.theta) or cirq.is_parameterized(
73 self.phi)
74
75 def _unitary_(self) -> Optional[np.ndarray]:
76 if cirq.is_parameterized(self):
77 return None
78 a = math.cos(self.theta)
79 b = -1j * math.sin(self.theta)
80 c = cmath.exp(-1j * self.phi)
81 return np.array([
82 [1, 0, 0, 0],
83 [0, a, b, 0],
84 [0, b, a, 0],
85 [0, 0, 0, c],
86 ])
87
88 def _pauli_expansion_(self) -> value.LinearDict[str]:
89 if protocols.is_parameterized(self):
90 return NotImplemented
91 a = math.cos(self.theta)
92 b = -1j * math.sin(self.theta)
93 c = cmath.exp(-1j * self.phi)
94 return value.LinearDict({
95 'II': (1 + c) / 4 + a / 2,
96 'IZ': (1 - c) / 4,
97 'ZI': (1 - c) / 4,
98 'ZZ': (1 + c) / 4 - a / 2,
99 'XX': b / 2,
100 'YY': b / 2,
101 })
102
103 def _resolve_parameters_(self, param_resolver: 'cirq.ParamResolver'
104 ) -> 'cirq.FSimGate':
105 return FSimGate(
106 protocols.resolve_parameters(self.theta, param_resolver),
107 protocols.resolve_parameters(self.phi, param_resolver))
108
109 def _apply_unitary_(self,
110 args: 'cirq.ApplyUnitaryArgs') -> Optional[np.ndarray]:
111 if cirq.is_parameterized(self):
112 return None
113 if self.theta != 0:
114 inner_matrix = protocols.unitary(cirq.Rx(2 * self.theta))
115 oi = args.subspace_index(0b01)
116 io = args.subspace_index(0b10)
117 out = cirq.apply_matrix_to_slices(args.target_tensor,
118 inner_matrix,
119 slices=[oi, io],
120 out=args.available_buffer)
121 else:
122 out = args.target_tensor
123 if self.phi != 0:
124 ii = args.subspace_index(0b11)
125 out[ii] *= cmath.exp(-1j * self.phi)
126 return out
127
128 def _decompose_(self, qubits) -> 'cirq.OP_TREE':
129 a, b = qubits
130 xx = cirq.XXPowGate(exponent=self.theta / np.pi, global_shift=-0.5)
131 yy = cirq.YYPowGate(exponent=self.theta / np.pi, global_shift=-0.5)
132 yield xx(a, b)
133 yield yy(a, b)
134 yield cirq.CZ(a, b)**(-self.phi / np.pi)
135
136 def _circuit_diagram_info_(self, args: 'cirq.CircuitDiagramInfoArgs'):
137 t = _format_rads(args, self.theta)
138 p = _format_rads(args, self.phi)
139 return 'fsim({}, {})'.format(t, p), '#2'
140
141 def __pow__(self, power):
142 return FSimGate(cirq.mul(self.theta, power), cirq.mul(self.phi, power))
143
144 def __repr__(self):
145 return 'cirq.FSimGate(theta={}, phi={})'.format(proper_repr(self.theta),
146 proper_repr(self.phi))
147
148
149 def _format_rads(args: 'cirq.CircuitDiagramInfoArgs', radians: float) -> str:
150 if cirq.is_parameterized(radians):
151 return str(radians)
152 unit = 'π' if args.use_unicode_characters else 'pi'
153 if radians == np.pi:
154 return unit
155 if radians == 0:
156 return '0'
157 if radians == -np.pi:
158 return '-' + unit
159 if args.precision is not None:
160 quantity = '{{:.{}}}'.format(args.precision).format(radians / np.pi)
161 return quantity + unit
162 return repr(radians)
163
[end of cirq/ops/fsim_gate.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/cirq/ops/fsim_gate.py b/cirq/ops/fsim_gate.py
--- a/cirq/ops/fsim_gate.py
+++ b/cirq/ops/fsim_gate.py
@@ -53,14 +53,23 @@
a = cos(theta)
b = -i·sin(theta)
c = exp(-i·phi)
+
+ Note the difference in sign conventions between FSimGate and the
+ ISWAP and CZPowGate:
+
+ FSimGate(θ, φ) = ISWAP**(-2θ/π) CZPowGate(exponent=-φ/π)
"""
def __init__(self, theta: float, phi: float):
"""
Args:
- theta: The strength of the XX+YY interaction, in radians.
- phi: CPhase angle in radians. Determines how much the |11⟩ state is
- phased.
+ theta: Swap angle on the span(|01⟩, |10⟩) subspace, in radians.
+ Determined by the strength and duration of the XX+YY
+ interaction. Note: uses opposite sign convention to the
+ iSWAP gate.
+ phi: Controlled phase angle, in radians. Determines how much the
+ |11⟩ state is phased. Note: uses opposite sign convention to
+ the CZPowGate.
"""
self.theta = theta
self.phi = phi
| {"golden_diff": "diff --git a/cirq/ops/fsim_gate.py b/cirq/ops/fsim_gate.py\n--- a/cirq/ops/fsim_gate.py\n+++ b/cirq/ops/fsim_gate.py\n@@ -53,14 +53,23 @@\n a = cos(theta)\n b = -i\u00b7sin(theta)\n c = exp(-i\u00b7phi)\n+\n+ Note the difference in sign conventions between FSimGate and the\n+ ISWAP and CZPowGate:\n+\n+ FSimGate(\u03b8, \u03c6) = ISWAP**(-2\u03b8/\u03c0) CZPowGate(exponent=-\u03c6/\u03c0)\n \"\"\"\n \n def __init__(self, theta: float, phi: float):\n \"\"\"\n Args:\n- theta: The strength of the XX+YY interaction, in radians.\n- phi: CPhase angle in radians. Determines how much the |11\u27e9 state is\n- phased.\n+ theta: Swap angle on the span(|01\u27e9, |10\u27e9) subspace, in radians.\n+ Determined by the strength and duration of the XX+YY\n+ interaction. Note: uses opposite sign convention to the\n+ iSWAP gate.\n+ phi: Controlled phase angle, in radians. Determines how much the\n+ |11\u27e9 state is phased. Note: uses opposite sign convention to\n+ the CZPowGate.\n \"\"\"\n self.theta = theta\n self.phi = phi\n", "issue": "Handedness between ISwapPowGate and FSim is different.\nThe ISwapPowGate and FSim gate rotate in different directions on the |01>, |10> subspace. This feels a bit odd to me. \n", "before_files": [{"content": "# Copyright 2019 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Defines the fermionic simulation gate family.\n\nThis is the family of two-qubit gates that preserve excitations (number of ON\nqubits), ignoring single-qubit gates and global phase. For example, when using\nthe second quantized representation of electrons to simulate chemistry, this is\na natural gateset because each ON qubit corresponds to an electron and in the\ncontext of chemistry the electron count is conserved over time. This property\napplies more generally to fermions, thus the name of the gate.\n\"\"\"\n\nimport cmath\nimport math\nfrom typing import Optional\n\nimport numpy as np\n\nimport cirq\nfrom cirq import protocols, value\nfrom cirq._compat import proper_repr\nfrom cirq.ops import gate_features\n\n\[email protected]_equality(approximate=True)\nclass FSimGate(gate_features.TwoQubitGate,\n gate_features.InterchangeableQubitsGate):\n \"\"\"Fermionic simulation gate family.\n\n Contains all two qubit interactions that preserve excitations, up to\n single-qubit rotations and global phase.\n\n The unitary matrix of this gate is:\n\n [[1, 0, 0, 0],\n [0, a, b, 0],\n [0, b, a, 0],\n [0, 0, 0, c]]\n\n where:\n\n a = cos(theta)\n b = -i\u00b7sin(theta)\n c = exp(-i\u00b7phi)\n \"\"\"\n\n def __init__(self, theta: float, phi: float):\n \"\"\"\n Args:\n theta: The strength of the XX+YY interaction, in radians.\n phi: CPhase angle in radians. 
Determines how much the |11\u27e9 state is\n phased.\n \"\"\"\n self.theta = theta\n self.phi = phi\n\n def _value_equality_values_(self):\n return self.theta, self.phi\n\n def _is_parameterized_(self):\n return cirq.is_parameterized(self.theta) or cirq.is_parameterized(\n self.phi)\n\n def _unitary_(self) -> Optional[np.ndarray]:\n if cirq.is_parameterized(self):\n return None\n a = math.cos(self.theta)\n b = -1j * math.sin(self.theta)\n c = cmath.exp(-1j * self.phi)\n return np.array([\n [1, 0, 0, 0],\n [0, a, b, 0],\n [0, b, a, 0],\n [0, 0, 0, c],\n ])\n\n def _pauli_expansion_(self) -> value.LinearDict[str]:\n if protocols.is_parameterized(self):\n return NotImplemented\n a = math.cos(self.theta)\n b = -1j * math.sin(self.theta)\n c = cmath.exp(-1j * self.phi)\n return value.LinearDict({\n 'II': (1 + c) / 4 + a / 2,\n 'IZ': (1 - c) / 4,\n 'ZI': (1 - c) / 4,\n 'ZZ': (1 + c) / 4 - a / 2,\n 'XX': b / 2,\n 'YY': b / 2,\n })\n\n def _resolve_parameters_(self, param_resolver: 'cirq.ParamResolver'\n ) -> 'cirq.FSimGate':\n return FSimGate(\n protocols.resolve_parameters(self.theta, param_resolver),\n protocols.resolve_parameters(self.phi, param_resolver))\n\n def _apply_unitary_(self,\n args: 'cirq.ApplyUnitaryArgs') -> Optional[np.ndarray]:\n if cirq.is_parameterized(self):\n return None\n if self.theta != 0:\n inner_matrix = protocols.unitary(cirq.Rx(2 * self.theta))\n oi = args.subspace_index(0b01)\n io = args.subspace_index(0b10)\n out = cirq.apply_matrix_to_slices(args.target_tensor,\n inner_matrix,\n slices=[oi, io],\n out=args.available_buffer)\n else:\n out = args.target_tensor\n if self.phi != 0:\n ii = args.subspace_index(0b11)\n out[ii] *= cmath.exp(-1j * self.phi)\n return out\n\n def _decompose_(self, qubits) -> 'cirq.OP_TREE':\n a, b = qubits\n xx = cirq.XXPowGate(exponent=self.theta / np.pi, global_shift=-0.5)\n yy = cirq.YYPowGate(exponent=self.theta / np.pi, global_shift=-0.5)\n yield xx(a, b)\n yield yy(a, b)\n yield cirq.CZ(a, b)**(-self.phi / np.pi)\n\n def _circuit_diagram_info_(self, args: 'cirq.CircuitDiagramInfoArgs'):\n t = _format_rads(args, self.theta)\n p = _format_rads(args, self.phi)\n return 'fsim({}, {})'.format(t, p), '#2'\n\n def __pow__(self, power):\n return FSimGate(cirq.mul(self.theta, power), cirq.mul(self.phi, power))\n\n def __repr__(self):\n return 'cirq.FSimGate(theta={}, phi={})'.format(proper_repr(self.theta),\n proper_repr(self.phi))\n\n\ndef _format_rads(args: 'cirq.CircuitDiagramInfoArgs', radians: float) -> str:\n if cirq.is_parameterized(radians):\n return str(radians)\n unit = '\u03c0' if args.use_unicode_characters else 'pi'\n if radians == np.pi:\n return unit\n if radians == 0:\n return '0'\n if radians == -np.pi:\n return '-' + unit\n if args.precision is not None:\n quantity = '{{:.{}}}'.format(args.precision).format(radians / np.pi)\n return quantity + unit\n return repr(radians)\n", "path": "cirq/ops/fsim_gate.py"}]} | 2,399 | 319 |
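For illustration, the sign convention stated in the new docstring — FSimGate(theta, phi) = ISWAP**(-2*theta/pi) composed with CZPowGate(exponent=-phi/pi) — can be checked numerically with plain NumPy; the matrices below follow Cirq's standard (zero global shift) conventions:

```python
import numpy as np

def fsim(theta, phi):
    # Unitary from the FSimGate docstring.
    a, b = np.cos(theta), -1j * np.sin(theta)
    return np.array([[1, 0, 0, 0],
                     [0, a, b, 0],
                     [0, b, a, 0],
                     [0, 0, 0, np.exp(-1j * phi)]])

def iswap_pow(t):
    # ISWAP**t: swaps within span(|01>, |10>) with the opposite handedness.
    c, s = np.cos(np.pi * t / 2), 1j * np.sin(np.pi * t / 2)
    return np.array([[1, 0, 0, 0],
                     [0, c, s, 0],
                     [0, s, c, 0],
                     [0, 0, 0, 1]])

def cz_pow(t):
    # CZPowGate(exponent=t).
    return np.diag([1, 1, 1, np.exp(1j * np.pi * t)])

theta, phi = 0.3, 0.7
assert np.allclose(fsim(theta, phi),
                   iswap_pow(-2 * theta / np.pi) @ cz_pow(-phi / np.pi))
```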
gh_patches_debug_7774 | rasdani/github-patches | git_diff | aio-libs__aiohttp-4120 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Fix connection info in ClientConnectorError client exception
A message like `Cannot connect to host www.google.com:443 ssl:None` is misleading:
`ssl:None` means 'default', not 'disabled'.
`Cannot connect to host www.google.com:443 ssl:[default]` looks much better.
</issue>
<code>
[start of aiohttp/client_exceptions.py]
1 """HTTP related errors."""
2
3 import asyncio
4 from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
5
6 from .typedefs import _CIMultiDict
7
8 try:
9 import ssl
10 SSLContext = ssl.SSLContext
11 except ImportError: # pragma: no cover
12 ssl = SSLContext = None # type: ignore
13
14
15 if TYPE_CHECKING: # pragma: no cover
16 from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa
17 Fingerprint)
18 else:
19 RequestInfo = ClientResponse = ConnectionKey = None
20
21
22 __all__ = (
23 'ClientError',
24
25 'ClientConnectionError',
26 'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError',
27
28 'ClientSSLError',
29 'ClientConnectorSSLError', 'ClientConnectorCertificateError',
30
31 'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError',
32 'ServerFingerprintMismatch',
33
34 'ClientResponseError', 'ClientHttpProxyError',
35 'WSServerHandshakeError', 'ContentTypeError',
36
37 'ClientPayloadError', 'InvalidURL')
38
39
40 class ClientError(Exception):
41 """Base class for client connection errors."""
42
43
44 class ClientResponseError(ClientError):
45 """Connection error during reading response.
46
47 request_info: instance of RequestInfo
48 """
49
50 def __init__(self, request_info: RequestInfo,
51 history: Tuple[ClientResponse, ...], *,
52 status: Optional[int]=None,
53 message: str='',
54 headers: Optional[_CIMultiDict]=None) -> None:
55 self.request_info = request_info
56 if status is not None:
57 self.status = status
58 else:
59 self.status = 0
60 self.message = message
61 self.headers = headers
62 self.history = history
63 self.args = (request_info, history)
64
65 def __str__(self) -> str:
66 return ("%s, message=%r, url=%r" %
67 (self.status, self.message, self.request_info.real_url))
68
69 def __repr__(self) -> str:
70 args = "%r, %r" % (self.request_info, self.history)
71 if self.status != 0:
72 args += ", status=%r" % (self.status,)
73 if self.message != '':
74 args += ", message=%r" % (self.message,)
75 if self.headers is not None:
76 args += ", headers=%r" % (self.headers,)
77 return "%s(%s)" % (type(self).__name__, args)
78
79
80 class ContentTypeError(ClientResponseError):
81 """ContentType found is not valid."""
82
83
84 class WSServerHandshakeError(ClientResponseError):
85 """websocket server handshake error."""
86
87
88 class ClientHttpProxyError(ClientResponseError):
89 """HTTP proxy error.
90
91 Raised in :class:`aiohttp.connector.TCPConnector` if
92 proxy responds with status other than ``200 OK``
93 on ``CONNECT`` request.
94 """
95
96
97 class TooManyRedirects(ClientResponseError):
98 """Client was redirected too many times."""
99
100
101 class ClientConnectionError(ClientError):
102 """Base class for client socket errors."""
103
104
105 class ClientOSError(ClientConnectionError, OSError):
106 """OSError error."""
107
108
109 class ClientConnectorError(ClientOSError):
110 """Client connector error.
111
112 Raised in :class:`aiohttp.connector.TCPConnector` if
113 connection to proxy can not be established.
114 """
115 def __init__(self, connection_key: ConnectionKey,
116 os_error: OSError) -> None:
117 self._conn_key = connection_key
118 self._os_error = os_error
119 super().__init__(os_error.errno, os_error.strerror)
120 self.args = (connection_key, os_error)
121
122 @property
123 def os_error(self) -> OSError:
124 return self._os_error
125
126 @property
127 def host(self) -> str:
128 return self._conn_key.host
129
130 @property
131 def port(self) -> Optional[int]:
132 return self._conn_key.port
133
134 @property
135 def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']:
136 return self._conn_key.ssl
137
138 def __str__(self) -> str:
139 return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]'
140 .format(self, self.strerror))
141
142 # OSError.__reduce__ does too much black magick
143 __reduce__ = BaseException.__reduce__
144
145
146 class ClientProxyConnectionError(ClientConnectorError):
147 """Proxy connection error.
148
149 Raised in :class:`aiohttp.connector.TCPConnector` if
150 connection to proxy can not be established.
151 """
152
153
154 class ServerConnectionError(ClientConnectionError):
155 """Server connection errors."""
156
157
158 class ServerDisconnectedError(ServerConnectionError):
159 """Server disconnected."""
160
161 def __init__(self, message: Optional[str]=None) -> None:
162 self.message = message
163 if message is None:
164 self.args = ()
165 else:
166 self.args = (message,)
167
168
169 class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
170 """Server timeout error."""
171
172
173 class ServerFingerprintMismatch(ServerConnectionError):
174 """SSL certificate does not match expected fingerprint."""
175
176 def __init__(self, expected: bytes, got: bytes,
177 host: str, port: int) -> None:
178 self.expected = expected
179 self.got = got
180 self.host = host
181 self.port = port
182 self.args = (expected, got, host, port)
183
184 def __repr__(self) -> str:
185 return '<{} expected={!r} got={!r} host={!r} port={!r}>'.format(
186 self.__class__.__name__, self.expected, self.got,
187 self.host, self.port)
188
189
190 class ClientPayloadError(ClientError):
191 """Response payload error."""
192
193
194 class InvalidURL(ClientError, ValueError):
195 """Invalid URL.
196
197 URL used for fetching is malformed, e.g. it doesn't contains host
198 part."""
199
200 # Derive from ValueError for backward compatibility
201
202 def __init__(self, url: Any) -> None:
203 # The type of url is not yarl.URL because the exception can be raised
204 # on URL(url) call
205 super().__init__(url)
206
207 @property
208 def url(self) -> Any:
209 return self.args[0]
210
211 def __repr__(self) -> str:
212 return '<{} {}>'.format(self.__class__.__name__, self.url)
213
214
215 class ClientSSLError(ClientConnectorError):
216 """Base error for ssl.*Errors."""
217
218
219 if ssl is not None:
220 cert_errors = (ssl.CertificateError,)
221 cert_errors_bases = (ClientSSLError, ssl.CertificateError,)
222
223 ssl_errors = (ssl.SSLError,)
224 ssl_error_bases = (ClientSSLError, ssl.SSLError)
225 else: # pragma: no cover
226 cert_errors = tuple()
227 cert_errors_bases = (ClientSSLError, ValueError,)
228
229 ssl_errors = tuple()
230 ssl_error_bases = (ClientSSLError,)
231
232
233 class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
234 """Response ssl error."""
235
236
237 class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
238 """Response certificate error."""
239
240 def __init__(self, connection_key:
241 ConnectionKey, certificate_error: Exception) -> None:
242 self._conn_key = connection_key
243 self._certificate_error = certificate_error
244 self.args = (connection_key, certificate_error)
245
246 @property
247 def certificate_error(self) -> Exception:
248 return self._certificate_error
249
250 @property
251 def host(self) -> str:
252 return self._conn_key.host
253
254 @property
255 def port(self) -> Optional[int]:
256 return self._conn_key.port
257
258 @property
259 def ssl(self) -> bool:
260 return self._conn_key.is_ssl
261
262 def __str__(self) -> str:
263 return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} '
264 '[{0.certificate_error.__class__.__name__}: '
265 '{0.certificate_error.args}]'.format(self))
266
[end of aiohttp/client_exceptions.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py
--- a/aiohttp/client_exceptions.py
+++ b/aiohttp/client_exceptions.py
@@ -136,8 +136,9 @@
return self._conn_key.ssl
def __str__(self) -> str:
- return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]'
- .format(self, self.strerror))
+ return ('Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]'
+ .format(self, self.ssl if self.ssl is not None else 'default',
+ self.strerror))
# OSError.__reduce__ does too much black magick
__reduce__ = BaseException.__reduce__
| {"golden_diff": "diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py\n--- a/aiohttp/client_exceptions.py\n+++ b/aiohttp/client_exceptions.py\n@@ -136,8 +136,9 @@\n return self._conn_key.ssl\n \n def __str__(self) -> str:\n- return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]'\n- .format(self, self.strerror))\n+ return ('Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]'\n+ .format(self, self.ssl if self.ssl is not None else 'default',\n+ self.strerror))\n \n # OSError.__reduce__ does too much black magick\n __reduce__ = BaseException.__reduce__\n", "issue": "Fix connection info in ClientConnectorError client exception\nThe message like `Cannot connect to host www.google.com:443 ssl:None` is misleading.\r\n`ssl:None` means 'default', not 'disabled'\r\n\r\n`Cannot connect to host www.google.com:443 ssl:[default]` looks much better\n", "before_files": [{"content": "\"\"\"HTTP related errors.\"\"\"\n\nimport asyncio\nfrom typing import TYPE_CHECKING, Any, Optional, Tuple, Union\n\nfrom .typedefs import _CIMultiDict\n\ntry:\n import ssl\n SSLContext = ssl.SSLContext\nexcept ImportError: # pragma: no cover\n ssl = SSLContext = None # type: ignore\n\n\nif TYPE_CHECKING: # pragma: no cover\n from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa\n Fingerprint)\nelse:\n RequestInfo = ClientResponse = ConnectionKey = None\n\n\n__all__ = (\n 'ClientError',\n\n 'ClientConnectionError',\n 'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError',\n\n 'ClientSSLError',\n 'ClientConnectorSSLError', 'ClientConnectorCertificateError',\n\n 'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError',\n 'ServerFingerprintMismatch',\n\n 'ClientResponseError', 'ClientHttpProxyError',\n 'WSServerHandshakeError', 'ContentTypeError',\n\n 'ClientPayloadError', 'InvalidURL')\n\n\nclass ClientError(Exception):\n \"\"\"Base class for client connection errors.\"\"\"\n\n\nclass ClientResponseError(ClientError):\n \"\"\"Connection error during reading response.\n\n request_info: instance of RequestInfo\n \"\"\"\n\n def __init__(self, request_info: RequestInfo,\n history: Tuple[ClientResponse, ...], *,\n status: Optional[int]=None,\n message: str='',\n headers: Optional[_CIMultiDict]=None) -> None:\n self.request_info = request_info\n if status is not None:\n self.status = status\n else:\n self.status = 0\n self.message = message\n self.headers = headers\n self.history = history\n self.args = (request_info, history)\n\n def __str__(self) -> str:\n return (\"%s, message=%r, url=%r\" %\n (self.status, self.message, self.request_info.real_url))\n\n def __repr__(self) -> str:\n args = \"%r, %r\" % (self.request_info, self.history)\n if self.status != 0:\n args += \", status=%r\" % (self.status,)\n if self.message != '':\n args += \", message=%r\" % (self.message,)\n if self.headers is not None:\n args += \", headers=%r\" % (self.headers,)\n return \"%s(%s)\" % (type(self).__name__, args)\n\n\nclass ContentTypeError(ClientResponseError):\n \"\"\"ContentType found is not valid.\"\"\"\n\n\nclass WSServerHandshakeError(ClientResponseError):\n \"\"\"websocket server handshake error.\"\"\"\n\n\nclass ClientHttpProxyError(ClientResponseError):\n \"\"\"HTTP proxy error.\n\n Raised in :class:`aiohttp.connector.TCPConnector` if\n proxy responds with status other than ``200 OK``\n on ``CONNECT`` request.\n \"\"\"\n\n\nclass TooManyRedirects(ClientResponseError):\n \"\"\"Client was redirected too many times.\"\"\"\n\n\nclass 
ClientConnectionError(ClientError):\n \"\"\"Base class for client socket errors.\"\"\"\n\n\nclass ClientOSError(ClientConnectionError, OSError):\n \"\"\"OSError error.\"\"\"\n\n\nclass ClientConnectorError(ClientOSError):\n \"\"\"Client connector error.\n\n Raised in :class:`aiohttp.connector.TCPConnector` if\n connection to proxy can not be established.\n \"\"\"\n def __init__(self, connection_key: ConnectionKey,\n os_error: OSError) -> None:\n self._conn_key = connection_key\n self._os_error = os_error\n super().__init__(os_error.errno, os_error.strerror)\n self.args = (connection_key, os_error)\n\n @property\n def os_error(self) -> OSError:\n return self._os_error\n\n @property\n def host(self) -> str:\n return self._conn_key.host\n\n @property\n def port(self) -> Optional[int]:\n return self._conn_key.port\n\n @property\n def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']:\n return self._conn_key.ssl\n\n def __str__(self) -> str:\n return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]'\n .format(self, self.strerror))\n\n # OSError.__reduce__ does too much black magick\n __reduce__ = BaseException.__reduce__\n\n\nclass ClientProxyConnectionError(ClientConnectorError):\n \"\"\"Proxy connection error.\n\n Raised in :class:`aiohttp.connector.TCPConnector` if\n connection to proxy can not be established.\n \"\"\"\n\n\nclass ServerConnectionError(ClientConnectionError):\n \"\"\"Server connection errors.\"\"\"\n\n\nclass ServerDisconnectedError(ServerConnectionError):\n \"\"\"Server disconnected.\"\"\"\n\n def __init__(self, message: Optional[str]=None) -> None:\n self.message = message\n if message is None:\n self.args = ()\n else:\n self.args = (message,)\n\n\nclass ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):\n \"\"\"Server timeout error.\"\"\"\n\n\nclass ServerFingerprintMismatch(ServerConnectionError):\n \"\"\"SSL certificate does not match expected fingerprint.\"\"\"\n\n def __init__(self, expected: bytes, got: bytes,\n host: str, port: int) -> None:\n self.expected = expected\n self.got = got\n self.host = host\n self.port = port\n self.args = (expected, got, host, port)\n\n def __repr__(self) -> str:\n return '<{} expected={!r} got={!r} host={!r} port={!r}>'.format(\n self.__class__.__name__, self.expected, self.got,\n self.host, self.port)\n\n\nclass ClientPayloadError(ClientError):\n \"\"\"Response payload error.\"\"\"\n\n\nclass InvalidURL(ClientError, ValueError):\n \"\"\"Invalid URL.\n\n URL used for fetching is malformed, e.g. 
it doesn't contains host\n part.\"\"\"\n\n # Derive from ValueError for backward compatibility\n\n def __init__(self, url: Any) -> None:\n # The type of url is not yarl.URL because the exception can be raised\n # on URL(url) call\n super().__init__(url)\n\n @property\n def url(self) -> Any:\n return self.args[0]\n\n def __repr__(self) -> str:\n return '<{} {}>'.format(self.__class__.__name__, self.url)\n\n\nclass ClientSSLError(ClientConnectorError):\n \"\"\"Base error for ssl.*Errors.\"\"\"\n\n\nif ssl is not None:\n cert_errors = (ssl.CertificateError,)\n cert_errors_bases = (ClientSSLError, ssl.CertificateError,)\n\n ssl_errors = (ssl.SSLError,)\n ssl_error_bases = (ClientSSLError, ssl.SSLError)\nelse: # pragma: no cover\n cert_errors = tuple()\n cert_errors_bases = (ClientSSLError, ValueError,)\n\n ssl_errors = tuple()\n ssl_error_bases = (ClientSSLError,)\n\n\nclass ClientConnectorSSLError(*ssl_error_bases): # type: ignore\n \"\"\"Response ssl error.\"\"\"\n\n\nclass ClientConnectorCertificateError(*cert_errors_bases): # type: ignore\n \"\"\"Response certificate error.\"\"\"\n\n def __init__(self, connection_key:\n ConnectionKey, certificate_error: Exception) -> None:\n self._conn_key = connection_key\n self._certificate_error = certificate_error\n self.args = (connection_key, certificate_error)\n\n @property\n def certificate_error(self) -> Exception:\n return self._certificate_error\n\n @property\n def host(self) -> str:\n return self._conn_key.host\n\n @property\n def port(self) -> Optional[int]:\n return self._conn_key.port\n\n @property\n def ssl(self) -> bool:\n return self._conn_key.is_ssl\n\n def __str__(self) -> str:\n return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} '\n '[{0.certificate_error.__class__.__name__}: '\n '{0.certificate_error.args}]'.format(self))\n", "path": "aiohttp/client_exceptions.py"}]} | 3,065 | 173 |
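An aside on the row above: the golden diff there is purely a message-formatting change. Below is a minimal, self-contained sketch of the before/after behaviour; `ConnKey` is a hypothetical stand-in for aiohttp's `ConnectionKey`, not the real class.

```python
# Sketch only: ConnKey stands in for aiohttp's ConnectionKey.
from collections import namedtuple

ConnKey = namedtuple("ConnKey", "host port ssl")

def old_message(key, strerror):
    # Before the patch: the raw attribute is printed, e.g. "ssl:None".
    return ("Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]"
            .format(key, strerror))

def new_message(key, strerror):
    # After the patch: None is reported as "default", since it means
    # "use the default SSL context", not "SSL disabled".
    ssl_desc = key.ssl if key.ssl is not None else "default"
    return ("Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]"
            .format(key, ssl_desc, strerror))

key = ConnKey("www.google.com", 443, None)
print(old_message(key, "Connect call failed"))  # ... ssl:None [...]
print(new_message(key, "Connect call failed"))  # ... ssl:default [...]
```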
gh_patches_debug_26432 | rasdani/github-patches | git_diff | stephenmcd__mezzanine-846 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
UnicodeDecodeError ('ascii' codec can't decode) when memcached enabled
I can reproduce this on Ubuntu 12 and CentOS 6.
memcached installed and enabled.
Install new virtualenv and activate
~/venv/workingvirtualenv
source ~/venv/workingvirtualenv/bin/activate
pip install mezzanine
pip install python-memcached
mezzanine-project testproject
add the live_settings.py cache settings to local_settings.py
Now create the DB
python manage.py createdb
Select all defaults and answer yes to questions, including creating demo data.
Can browse demo pages including /gallery with no errors and caching appears to be working.
Now create and activate a new virtualenv and install everything exactly the same, only this time install mezzanine from GitHub.
git clone https://github.com/stephenmcd/mezzanine.git
cd mezzanine
python setup.py install
Now when you try to browse the pages they will work until you go to /gallery/, where you will get the following error.
The error is:
"UnicodeDecodeError: 'ascii' codec can't decode byte 0xcc in position 31: ordinal not in range(128)"
If I install the DB with caching not configured in local_settings.py I can browse to /gallery with no error. Then when I try to enable caching and browse to /gallery I get the error again. I also tried recreating the DB in the new virtualenv with the newer version of mezzanine. The problem still occurs.
I can reproduce all this when I use MySQL instead of SQLite as well. Also occurs on blogposts. My live site is using this github version of Mezzanine because my theme uses bootstrap3. Just want to get caching working now.
</issue>
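A minimal sketch of the failure mode this traceback points at, using hypothetical values and assuming (as the report suggests) a Python 2 setup where python-memcached hands back the cached body as a byte string while the nevercache marker is unicode:

```python
# Python 2 sketch: bytes body from the cache vs. a unicode split token.
content = b"<p>gallery \xcc</p>"  # cached response body with a non-ASCII byte
token = u"nevercache-marker"      # hypothetical unicode delimiter

try:
    parts = content.split(token)  # str/unicode mix -> implicit ascii decode
except UnicodeDecodeError as exc:
    print(exc)  # 'ascii' codec can't decode byte 0xcc ...
```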
<code>
[start of mezzanine/core/middleware.py]
1 from __future__ import unicode_literals
2
3 from future.utils import native_str
4
5 from django.contrib import admin
6 from django.contrib.auth import logout
7 from django.contrib.redirects.models import Redirect
8 from django.core.exceptions import MiddlewareNotUsed
9 from django.core.urlresolvers import reverse
10 from django.http import (HttpResponse, HttpResponseRedirect,
11 HttpResponsePermanentRedirect, HttpResponseGone)
12 from django.utils.cache import get_max_age
13 from django.template import Template, RequestContext
14 from django.middleware.csrf import CsrfViewMiddleware, get_token
15
16 from mezzanine.conf import settings
17 from mezzanine.core.models import SitePermission
18 from mezzanine.utils.cache import (cache_key_prefix, nevercache_token,
19 cache_get, cache_set, cache_installed)
20 from mezzanine.utils.device import templates_for_device
21 from mezzanine.utils.sites import current_site_id, templates_for_host
22 from mezzanine.utils.urls import next_url
23
24
25 _deprecated = {
26 "AdminLoginInterfaceSelector": "AdminLoginInterfaceSelectorMiddleware",
27 "DeviceAwareUpdateCacheMiddleware": "UpdateCacheMiddleware",
28 "DeviceAwareFetchFromCacheMiddleware": "FetchFromCacheMiddleware",
29 }
30
31
32 class _Deprecated(object):
33 def __init__(self, *args, **kwargs):
34 from warnings import warn
35 msg = "mezzanine.core.middleware.%s is deprecated." % self.old
36 if self.new:
37 msg += (" Please change the MIDDLEWARE_CLASSES setting to use "
38 "mezzanine.core.middleware.%s" % self.new)
39 warn(msg)
40
41 for old, new in _deprecated.items():
42 globals()[old] = type(native_str(old),
43 (_Deprecated,),
44 {"old": old, "new": new})
45
46
47 class AdminLoginInterfaceSelectorMiddleware(object):
48 """
49 Checks for a POST from the admin login view and if authentication is
50 successful and the "site" interface is selected, redirect to the site.
51 """
52 def process_view(self, request, view_func, view_args, view_kwargs):
53 login_type = request.POST.get("mezzanine_login_interface")
54 if login_type and not request.user.is_authenticated():
55 response = view_func(request, *view_args, **view_kwargs)
56 if request.user.is_authenticated():
57 if login_type == "admin":
58 next = request.get_full_path()
59 else:
60 next = next_url(request) or "/"
61 return HttpResponseRedirect(next)
62 else:
63 return response
64 return None
65
66
67 class SitePermissionMiddleware(object):
68 """
69 Marks the current user with a ``has_site_permission`` which is
70 used in place of ``user.is_staff`` to achieve per-site staff
71 access.
72 """
73 def process_view(self, request, view_func, view_args, view_kwargs):
74 has_site_permission = False
75 if request.user.is_superuser:
76 has_site_permission = True
77 elif request.user.is_staff:
78 lookup = {"user": request.user, "sites": current_site_id()}
79 try:
80 SitePermission.objects.get(**lookup)
81 except SitePermission.DoesNotExist:
82 admin_index = reverse("admin:index")
83 if request.path.startswith(admin_index):
84 logout(request)
85 view_func = admin.site.login
86 extra_context = {"no_site_permission": True}
87 return view_func(request, extra_context=extra_context)
88 else:
89 has_site_permission = True
90 request.user.has_site_permission = has_site_permission
91
92
93 class TemplateForDeviceMiddleware(object):
94 """
95 Inserts device-specific templates to the template list.
96 """
97 def process_template_response(self, request, response):
98 if hasattr(response, "template_name"):
99 if not isinstance(response.template_name, Template):
100 templates = templates_for_device(request,
101 response.template_name)
102 response.template_name = templates
103 return response
104
105
106 class TemplateForHostMiddleware(object):
107 """
108 Inserts host-specific templates to the template list.
109 """
110 def process_template_response(self, request, response):
111 if hasattr(response, "template_name"):
112 if not isinstance(response.template_name, Template):
113 templates = templates_for_host(request,
114 response.template_name)
115 response.template_name = templates
116 return response
117
118
119 class UpdateCacheMiddleware(object):
120 """
121 Response phase for Mezzanine's cache middleware. Handles caching
122 the response, and then performing the second phase of rendering,
123 for content enclosed by the ``nevercache`` tag.
124 """
125
126 def process_response(self, request, response):
127
128 # Cache the response if all the required conditions are met.
129 # Response must be marked for updating by the
130 # ``FetchFromCacheMiddleware`` having a cache get miss, the
131 # user must not be authenticated, the HTTP status must be OK
132         # and the response mustn't include an expiry age, indicating it
133 # shouldn't be cached.
134 marked_for_update = getattr(request, "_update_cache", False)
135 anon = hasattr(request, "user") and not request.user.is_authenticated()
136 valid_status = response.status_code == 200
137 timeout = get_max_age(response)
138 if timeout is None:
139 timeout = settings.CACHE_MIDDLEWARE_SECONDS
140 if anon and valid_status and marked_for_update and timeout:
141 cache_key = cache_key_prefix(request) + request.get_full_path()
142 _cache_set = lambda r: cache_set(cache_key, r.content, timeout)
143 if callable(getattr(response, "render", None)):
144 response.add_post_render_callback(_cache_set)
145 else:
146 _cache_set(response)
147
148 # Second phase rendering for non-cached template code and
149 # content. Split on the delimiter the ``nevercache`` tag
150 # wrapped its contents in, and render only the content
151 # enclosed by it, to avoid possible template code injection.
152 parts = response.content.split(nevercache_token())
153 content_type = response.get("content-type", "")
154 if content_type.startswith("text") and len(parts) > 1:
155 # Restore csrf token from cookie - check the response
156 # first as it may be being set for the first time.
157 csrf_token = None
158 try:
159 csrf_token = response.cookies[settings.CSRF_COOKIE_NAME].value
160 except KeyError:
161 try:
162 csrf_token = request.COOKIES[settings.CSRF_COOKIE_NAME]
163 except KeyError:
164 pass
165 if csrf_token:
166 request.META["CSRF_COOKIE"] = csrf_token
167 context = RequestContext(request)
168 for i, part in enumerate(parts):
169 if i % 2:
170 part = Template(part).render(context).encode("utf-8")
171 parts[i] = part
172 response.content = "".join(parts)
173 response["Content-Length"] = len(response.content)
174 if hasattr(request, '_messages'):
175 # Required to clear out user messages.
176 request._messages.update(response)
177 return response
178
179
180 class FetchFromCacheMiddleware(object):
181 """
182 Request phase for Mezzanine cache middleware. Return a response
183     from cache if found, otherwise mark the request for updating
184 the cache in ``UpdateCacheMiddleware``.
185 """
186
187 def process_request(self, request):
188 if (cache_installed() and request.method == "GET" and
189 not request.user.is_authenticated()):
190 cache_key = cache_key_prefix(request) + request.get_full_path()
191 response = cache_get(cache_key)
192 # We need to force a csrf token here, as new sessions
193             # won't receive one on their first request, with cache
194 # middleware running.
195 csrf_mw_name = "django.middleware.csrf.CsrfViewMiddleware"
196 if csrf_mw_name in settings.MIDDLEWARE_CLASSES:
197 csrf_mw = CsrfViewMiddleware()
198 csrf_mw.process_view(request, lambda x: None, None, None)
199 get_token(request)
200 if response is None:
201 request._update_cache = True
202 else:
203 return HttpResponse(response)
204
205
206 class SSLRedirectMiddleware(object):
207 """
208 Handles redirections required for SSL when ``SSL_ENABLED`` is ``True``.
209
210 If ``SSL_FORCE_HOST`` is ``True``, and is not the current host,
211 redirect to it.
212
213 Also ensure URLs defined by ``SSL_FORCE_URL_PREFIXES`` are redirect
214 to HTTPS, and redirect all other URLs to HTTP if on HTTPS.
215 """
216 def process_request(self, request):
217 settings.use_editable()
218 force_host = settings.SSL_FORCE_HOST
219 if force_host and request.get_host().split(":")[0] != force_host:
220 url = "http://%s%s" % (force_host, request.get_full_path())
221 return HttpResponsePermanentRedirect(url)
222 if settings.SSL_ENABLED and not settings.DEV_SERVER:
223 url = "%s%s" % (request.get_host(), request.get_full_path())
224 if request.path.startswith(settings.SSL_FORCE_URL_PREFIXES):
225 if not request.is_secure():
226 return HttpResponseRedirect("https://%s" % url)
227 elif request.is_secure() and settings.SSL_FORCED_PREFIXES_ONLY:
228 return HttpResponseRedirect("http://%s" % url)
229
230
231 class RedirectFallbackMiddleware(object):
232 """
233 Port of Django's ``RedirectFallbackMiddleware`` that uses
234 Mezzanine's approach for determining the current site.
235 """
236
237 def __init__(self):
238 if "django.contrib.redirects" not in settings.INSTALLED_APPS:
239 raise MiddlewareNotUsed
240
241 def process_response(self, request, response):
242 if response.status_code == 404:
243 lookup = {
244 "site_id": current_site_id(),
245 "old_path": request.get_full_path(),
246 }
247 try:
248 redirect = Redirect.objects.get(**lookup)
249 except Redirect.DoesNotExist:
250 pass
251 else:
252 if not redirect.new_path:
253 response = HttpResponseGone()
254 else:
255 response = HttpResponseRedirect(redirect.new_path)
256 return response
257
[end of mezzanine/core/middleware.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mezzanine/core/middleware.py b/mezzanine/core/middleware.py
--- a/mezzanine/core/middleware.py
+++ b/mezzanine/core/middleware.py
@@ -149,7 +149,12 @@
# content. Split on the delimiter the ``nevercache`` tag
# wrapped its contents in, and render only the content
# enclosed by it, to avoid possible template code injection.
- parts = response.content.split(nevercache_token())
+ token = nevercache_token()
+ try:
+ token = token.encode('utf-8')
+ except AttributeError:
+ pass
+ parts = response.content.split(token)
content_type = response.get("content-type", "")
if content_type.startswith("text") and len(parts) > 1:
# Restore csrf token from cookie - check the response
@@ -169,7 +174,7 @@
if i % 2:
part = Template(part).render(context).encode("utf-8")
parts[i] = part
- response.content = "".join(parts)
+ response.content = b"".join(parts)
response["Content-Length"] = len(response.content)
if hasattr(request, '_messages'):
# Required to clear out user messages.
| {"golden_diff": "diff --git a/mezzanine/core/middleware.py b/mezzanine/core/middleware.py\n--- a/mezzanine/core/middleware.py\n+++ b/mezzanine/core/middleware.py\n@@ -149,7 +149,12 @@\n # content. Split on the delimiter the ``nevercache`` tag\n # wrapped its contents in, and render only the content\n # enclosed by it, to avoid possible template code injection.\n- parts = response.content.split(nevercache_token())\n+ token = nevercache_token()\n+ try:\n+ token = token.encode('utf-8')\n+ except AttributeError:\n+ pass\n+ parts = response.content.split(token)\n content_type = response.get(\"content-type\", \"\")\n if content_type.startswith(\"text\") and len(parts) > 1:\n # Restore csrf token from cookie - check the response\n@@ -169,7 +174,7 @@\n if i % 2:\n part = Template(part).render(context).encode(\"utf-8\")\n parts[i] = part\n- response.content = \"\".join(parts)\n+ response.content = b\"\".join(parts)\n response[\"Content-Length\"] = len(response.content)\n if hasattr(request, '_messages'):\n # Required to clear out user messages.\n", "issue": "UnicodeDecodeError ('ascii' codec can't decode) when memcached enabled\nI can reproduce this on Ubuntu 12 and CentOS 6. \n\nmemcached installed and enabled. \n\nInstall new virtualenv and activate\n~/venv/workingvirtualenv\nsource ~/venv/workingvirtulaenv/bin/activate\n\npip install mezzanine\npip install python-memcached\nmezzanine-project testproject\n\nadd the live_settings.py cache settings to local_settings.py\n\nNow create the DB\npython manage.py createdb\n\nSelect all defaults and answer yes to questions including creating demo data..\nCan browse demo pages including /gallery with no errors and caching appears to be working.\n\nNow create and activate a new virtualenv and install everything exactly the same only this time install mezzanine from github.\ngit clone https://github.com/stephenmcd/mezzanine.git\ncd mezzanine\npython setup.py install\n\nNow when you try browse the pages they will work until you go to /gallery/ and you will get the following error.\n\nThe error is:\n\"UnicodeDecodeError: 'ascii' codec can't decode byte 0xcc in position 31: ordinal not in range(128)\"\n\nIf I install the DB with caching not configured in local_settings.py I can browse to /gallery with no error. Then when I try enable caching and browse to /gallery I get the error again. I also tried recreating the DB in the new virtualenv with the newer version of mezzanine. The problem still occurs.\n\nI can reproduce all this when I use MySQL instead of SQLite as well. Also occurs on blogposts. My live site is using this github version of Mezzanine because my theme uses bootstrap3. 
Just want to get caching working now.\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nfrom future.utils import native_str\n\nfrom django.contrib import admin\nfrom django.contrib.auth import logout\nfrom django.contrib.redirects.models import Redirect\nfrom django.core.exceptions import MiddlewareNotUsed\nfrom django.core.urlresolvers import reverse\nfrom django.http import (HttpResponse, HttpResponseRedirect,\n HttpResponsePermanentRedirect, HttpResponseGone)\nfrom django.utils.cache import get_max_age\nfrom django.template import Template, RequestContext\nfrom django.middleware.csrf import CsrfViewMiddleware, get_token\n\nfrom mezzanine.conf import settings\nfrom mezzanine.core.models import SitePermission\nfrom mezzanine.utils.cache import (cache_key_prefix, nevercache_token,\n cache_get, cache_set, cache_installed)\nfrom mezzanine.utils.device import templates_for_device\nfrom mezzanine.utils.sites import current_site_id, templates_for_host\nfrom mezzanine.utils.urls import next_url\n\n\n_deprecated = {\n \"AdminLoginInterfaceSelector\": \"AdminLoginInterfaceSelectorMiddleware\",\n \"DeviceAwareUpdateCacheMiddleware\": \"UpdateCacheMiddleware\",\n \"DeviceAwareFetchFromCacheMiddleware\": \"FetchFromCacheMiddleware\",\n}\n\n\nclass _Deprecated(object):\n def __init__(self, *args, **kwargs):\n from warnings import warn\n msg = \"mezzanine.core.middleware.%s is deprecated.\" % self.old\n if self.new:\n msg += (\" Please change the MIDDLEWARE_CLASSES setting to use \"\n \"mezzanine.core.middleware.%s\" % self.new)\n warn(msg)\n\nfor old, new in _deprecated.items():\n globals()[old] = type(native_str(old),\n (_Deprecated,),\n {\"old\": old, \"new\": new})\n\n\nclass AdminLoginInterfaceSelectorMiddleware(object):\n \"\"\"\n Checks for a POST from the admin login view and if authentication is\n successful and the \"site\" interface is selected, redirect to the site.\n \"\"\"\n def process_view(self, request, view_func, view_args, view_kwargs):\n login_type = request.POST.get(\"mezzanine_login_interface\")\n if login_type and not request.user.is_authenticated():\n response = view_func(request, *view_args, **view_kwargs)\n if request.user.is_authenticated():\n if login_type == \"admin\":\n next = request.get_full_path()\n else:\n next = next_url(request) or \"/\"\n return HttpResponseRedirect(next)\n else:\n return response\n return None\n\n\nclass SitePermissionMiddleware(object):\n \"\"\"\n Marks the current user with a ``has_site_permission`` which is\n used in place of ``user.is_staff`` to achieve per-site staff\n access.\n \"\"\"\n def process_view(self, request, view_func, view_args, view_kwargs):\n has_site_permission = False\n if request.user.is_superuser:\n has_site_permission = True\n elif request.user.is_staff:\n lookup = {\"user\": request.user, \"sites\": current_site_id()}\n try:\n SitePermission.objects.get(**lookup)\n except SitePermission.DoesNotExist:\n admin_index = reverse(\"admin:index\")\n if request.path.startswith(admin_index):\n logout(request)\n view_func = admin.site.login\n extra_context = {\"no_site_permission\": True}\n return view_func(request, extra_context=extra_context)\n else:\n has_site_permission = True\n request.user.has_site_permission = has_site_permission\n\n\nclass TemplateForDeviceMiddleware(object):\n \"\"\"\n Inserts device-specific templates to the template list.\n \"\"\"\n def process_template_response(self, request, response):\n if hasattr(response, \"template_name\"):\n if not isinstance(response.template_name, 
Template):\n templates = templates_for_device(request,\n response.template_name)\n response.template_name = templates\n return response\n\n\nclass TemplateForHostMiddleware(object):\n \"\"\"\n Inserts host-specific templates to the template list.\n \"\"\"\n def process_template_response(self, request, response):\n if hasattr(response, \"template_name\"):\n if not isinstance(response.template_name, Template):\n templates = templates_for_host(request,\n response.template_name)\n response.template_name = templates\n return response\n\n\nclass UpdateCacheMiddleware(object):\n \"\"\"\n Response phase for Mezzanine's cache middleware. Handles caching\n the response, and then performing the second phase of rendering,\n for content enclosed by the ``nevercache`` tag.\n \"\"\"\n\n def process_response(self, request, response):\n\n # Cache the response if all the required conditions are met.\n # Response must be marked for updating by the\n # ``FetchFromCacheMiddleware`` having a cache get miss, the\n # user must not be authenticated, the HTTP status must be OK\n # and the response mustn't include an expiry age, incicating it\n # shouldn't be cached.\n marked_for_update = getattr(request, \"_update_cache\", False)\n anon = hasattr(request, \"user\") and not request.user.is_authenticated()\n valid_status = response.status_code == 200\n timeout = get_max_age(response)\n if timeout is None:\n timeout = settings.CACHE_MIDDLEWARE_SECONDS\n if anon and valid_status and marked_for_update and timeout:\n cache_key = cache_key_prefix(request) + request.get_full_path()\n _cache_set = lambda r: cache_set(cache_key, r.content, timeout)\n if callable(getattr(response, \"render\", None)):\n response.add_post_render_callback(_cache_set)\n else:\n _cache_set(response)\n\n # Second phase rendering for non-cached template code and\n # content. Split on the delimiter the ``nevercache`` tag\n # wrapped its contents in, and render only the content\n # enclosed by it, to avoid possible template code injection.\n parts = response.content.split(nevercache_token())\n content_type = response.get(\"content-type\", \"\")\n if content_type.startswith(\"text\") and len(parts) > 1:\n # Restore csrf token from cookie - check the response\n # first as it may be being set for the first time.\n csrf_token = None\n try:\n csrf_token = response.cookies[settings.CSRF_COOKIE_NAME].value\n except KeyError:\n try:\n csrf_token = request.COOKIES[settings.CSRF_COOKIE_NAME]\n except KeyError:\n pass\n if csrf_token:\n request.META[\"CSRF_COOKIE\"] = csrf_token\n context = RequestContext(request)\n for i, part in enumerate(parts):\n if i % 2:\n part = Template(part).render(context).encode(\"utf-8\")\n parts[i] = part\n response.content = \"\".join(parts)\n response[\"Content-Length\"] = len(response.content)\n if hasattr(request, '_messages'):\n # Required to clear out user messages.\n request._messages.update(response)\n return response\n\n\nclass FetchFromCacheMiddleware(object):\n \"\"\"\n Request phase for Mezzanine cache middleware. 
Return a response\n from cache if found, othwerwise mark the request for updating\n the cache in ``UpdateCacheMiddleware``.\n \"\"\"\n\n def process_request(self, request):\n if (cache_installed() and request.method == \"GET\" and\n not request.user.is_authenticated()):\n cache_key = cache_key_prefix(request) + request.get_full_path()\n response = cache_get(cache_key)\n # We need to force a csrf token here, as new sessions\n # won't receieve one on their first request, with cache\n # middleware running.\n csrf_mw_name = \"django.middleware.csrf.CsrfViewMiddleware\"\n if csrf_mw_name in settings.MIDDLEWARE_CLASSES:\n csrf_mw = CsrfViewMiddleware()\n csrf_mw.process_view(request, lambda x: None, None, None)\n get_token(request)\n if response is None:\n request._update_cache = True\n else:\n return HttpResponse(response)\n\n\nclass SSLRedirectMiddleware(object):\n \"\"\"\n Handles redirections required for SSL when ``SSL_ENABLED`` is ``True``.\n\n If ``SSL_FORCE_HOST`` is ``True``, and is not the current host,\n redirect to it.\n\n Also ensure URLs defined by ``SSL_FORCE_URL_PREFIXES`` are redirect\n to HTTPS, and redirect all other URLs to HTTP if on HTTPS.\n \"\"\"\n def process_request(self, request):\n settings.use_editable()\n force_host = settings.SSL_FORCE_HOST\n if force_host and request.get_host().split(\":\")[0] != force_host:\n url = \"http://%s%s\" % (force_host, request.get_full_path())\n return HttpResponsePermanentRedirect(url)\n if settings.SSL_ENABLED and not settings.DEV_SERVER:\n url = \"%s%s\" % (request.get_host(), request.get_full_path())\n if request.path.startswith(settings.SSL_FORCE_URL_PREFIXES):\n if not request.is_secure():\n return HttpResponseRedirect(\"https://%s\" % url)\n elif request.is_secure() and settings.SSL_FORCED_PREFIXES_ONLY:\n return HttpResponseRedirect(\"http://%s\" % url)\n\n\nclass RedirectFallbackMiddleware(object):\n \"\"\"\n Port of Django's ``RedirectFallbackMiddleware`` that uses\n Mezzanine's approach for determining the current site.\n \"\"\"\n\n def __init__(self):\n if \"django.contrib.redirects\" not in settings.INSTALLED_APPS:\n raise MiddlewareNotUsed\n\n def process_response(self, request, response):\n if response.status_code == 404:\n lookup = {\n \"site_id\": current_site_id(),\n \"old_path\": request.get_full_path(),\n }\n try:\n redirect = Redirect.objects.get(**lookup)\n except Redirect.DoesNotExist:\n pass\n else:\n if not redirect.new_path:\n response = HttpResponseGone()\n else:\n response = HttpResponseRedirect(redirect.new_path)\n return response\n", "path": "mezzanine/core/middleware.py"}]} | 3,634 | 278 |
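The essence of the fix in that diff can be sketched standalone: normalise the token to bytes before splitting, then join the parts back with a bytes separator. This is a reduced sketch, not the actual middleware code:

```python
def split_on_token(content, token):
    # Mirrors the guard added in the diff: encode the token if it is
    # still a text string; a bytes token just raises AttributeError
    # and passes through unchanged.
    try:
        token = token.encode("utf-8")
    except AttributeError:
        pass
    return content.split(token)

parts = split_on_token(b"a\xcc MARKER b", u"MARKER")
assert parts == [b"a\xcc ", b" b"]
assert b"".join(parts) == b"a\xcc  b"  # bytes join, as in the patched code
```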
gh_patches_debug_14458 | rasdani/github-patches | git_diff | kovidgoyal__kitty-5211 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
MacOS: Launch OS Window via Remote Control
**Describe the bug**
Ref: https://github.com/kovidgoyal/kitty/issues/45#issuecomment-915753960
Remote control via socket doesn't work for opening a new OS window unless there is an existing window open already.
**To Reproduce**
Steps to reproduce the behavior:
1. Launch kitty without a window:
````
kitty --config NONE --listen-on=unix:/tmp/scratch -o allow_remote_control=yes -o macos_quit_when_last_window_closed=no -1 --instance-group scratch false &
````
2. Attempt to open OS Window using remote control:
````
kitty @ --to unix:/tmp/scratch launch --type=os-window --title=test
````
3. No window opens up
4. Right click icon -> Open OS Window
5. Reattempt remote control:
````
kitty @ --to unix:/tmp/scratch launch --type=os-window --title=test
````
6. Window opens up fine with title "test"
**Environment details**
```
kitty 0.25.2 created by Kovid Goyal
Darwin gtd.lan 21.5.0 Darwin Kernel Version 21.5.0: Tue Apr 26 21:08:22 PDT 2022; root:xnu-8020.121.3~4/RELEASE_X86_64 x86_64
ProductName: macOS ProductVersion: 12.4 BuildVersion: 21F79
Frozen: True
Paths:
kitty: /Applications/kitty.app/Contents/MacOS/kitty
base dir: /Applications/kitty.app/Contents/Resources/kitty
extensions dir: /Applications/kitty.app/Contents/Resources/Python/lib/kitty-extensions
system shell: /bin/zsh
Loaded config overrides:
allow_remote_control yes
macos_quit_when_last_window_closed no
Config options different from defaults:
allow_remote_control y
Important environment variables seen by the kitty process:
PATH /usr/local/opt/coreutils/libexec/gnubin:/Users/hars/.config/bin:/Users/hars/.dwm/statusbar:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin:/usr/local/opt/coreutils/libexec/gnubin:/Users/hars/.config/bin:/Users/hars/.dwm/statusbar:/Applications/kitty.app/Contents/MacOS:/Users/hars/.local/share/sheldon/repos/github.com/kazhala/dotbare:/usr/local/opt/fzf/bin
LANG en_AU.UTF-8
VISUAL nvim
EDITOR nvim
SHELL /bin/zsh
USER hars
XDG_CONFIG_HOME /Users/hars/.config
XDG_CACHE_HOME /Users/hars/.cache
```
**Additional context**
Also tried ``new-window --window-type=os``
</issue>
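A rough sketch of the payload the client builds for that failing command, going by `message_to_kitty` in the handler shown below; the field names other than `args` mirror the CLI options and are illustrative, not an exact wire dump:

```python
# Illustrative only: message_to_kitty() copies every CLI option into the
# payload dict next to the positional args.
opts = {"type": "os-window", "window_title": "test", "no_response": False}

payload = {"args": []}  # empty args -> run the default shell
payload.update(opts)
print(payload)
# {'args': [], 'type': 'os-window', 'window_title': 'test', 'no_response': False}
```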
<code>
[start of kitty/rc/launch.py]
1 #!/usr/bin/env python
2 # License: GPLv3 Copyright: 2020, Kovid Goyal <kovid at kovidgoyal.net>
3
4
5 from typing import TYPE_CHECKING, Optional
6
7 from kitty.cli_stub import LaunchCLIOptions
8 from kitty.launch import (
9 launch as do_launch, options_spec as launch_options_spec,
10 parse_launch_args
11 )
12
13 from .base import (
14 MATCH_TAB_OPTION, ArgsType, Boss, PayloadGetType, PayloadType, RCOptions,
15 RemoteCommand, ResponseType, Window
16 )
17
18 if TYPE_CHECKING:
19 from kitty.cli_stub import LaunchRCOptions as CLIOptions
20
21
22 class Launch(RemoteCommand):
23
24 '''
25 args+: The command line to run in the new window, as a list, use an empty list to run the default shell
26 match: The tab to open the new window in
27 window_title: Title for the new window
28 cwd: Working directory for the new window
29 env: List of environment variables of the form NAME=VALUE
30 tab_title: Title for the new tab
31 type: The type of window to open
32 keep_focus: Boolean indicating whether the current window should retain focus or not
33 copy_colors: Boolean indicating whether to copy the colors from the current window
34 copy_cmdline: Boolean indicating whether to copy the cmdline from the current window
35 copy_env: Boolean indicating whether to copy the environ from the current window
36 hold: Boolean indicating whether to keep window open after cmd exits
37 location: Where in the tab to open the new window
38 allow_remote_control: Boolean indicating whether to allow remote control from the new window
39     stdin_source: Where to get stdin for the process from
40 stdin_add_formatting: Boolean indicating whether to add formatting codes to stdin
41 stdin_add_line_wrap_markers: Boolean indicating whether to add line wrap markers to stdin
42 no_response: Boolean indicating whether to send back the window id
43 marker: Specification for marker for new window, for example: "text 1 ERROR"
44 logo: Path to window logo
45 logo_position: Window logo position as string or empty string to use default
46 logo_alpha: Window logo alpha or -1 to use default
47 self: Boolean, if True use tab the command was run in
48 '''
49
50 short_desc = 'Run an arbitrary process in a new window/tab'
51 desc = (
52 'Prints out the id of the newly opened window. Any command line arguments'
53 ' are assumed to be the command line used to run in the new window, if none'
54 ' are provided, the default shell is run. For example:'
55 ' :code:`kitty @ launch --title=Email mutt`.'
56 )
57 options_spec = MATCH_TAB_OPTION + '\n\n' + '''\
58 --no-response
59 type=bool-set
60 Do not print out the id of the newly created window.
61
62
63 --self
64 type=bool-set
65 If specified the tab containing the window this command is run in is used
66 instead of the active tab
67 ''' + '\n\n' + launch_options_spec().replace(':option:`launch', ':option:`kitty @ launch')
68 argspec = '[CMD ...]'
69
70 def message_to_kitty(self, global_opts: RCOptions, opts: 'CLIOptions', args: ArgsType) -> PayloadType:
71 ans = {'args': args or []}
72 for attr, val in opts.__dict__.items():
73 ans[attr] = val
74 return ans
75
76 def response_from_kitty(self, boss: Boss, window: Optional[Window], payload_get: PayloadGetType) -> ResponseType:
77 default_opts = parse_launch_args()[0]
78 opts = LaunchCLIOptions()
79 for key, default_value in default_opts.__dict__.items():
80 val = payload_get(key)
81 if val is None:
82 val = default_value
83 setattr(opts, key, val)
84 tabs = self.tabs_for_match_payload(boss, window, payload_get)
85 if tabs and tabs[0]:
86 w = do_launch(boss, opts, payload_get('args') or [], target_tab=tabs[0])
87 return None if payload_get('no_response') else str(getattr(w, 'id', 0))
88 return None
89
90
91 launch = Launch()
92
[end of kitty/rc/launch.py]
</code>
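Reading `response_from_kitty` above against the issue, the headless failure plausibly comes from the tab lookup: with no OS windows there are no tabs to match, so the `if tabs and tabs[0]` guard never fires and the method returns `None` without calling `do_launch`. A toy sketch of that control flow:

```python
def launch_guard(tabs):
    # Simplified from response_from_kitty: the launch only happens when a
    # target tab already exists, even for --type=os-window.
    if tabs and tabs[0]:
        return "do_launch(...)"
    return None  # request is silently dropped

assert launch_guard([]) is None              # headless: no window appears
assert launch_guard(["tab0"]) == "do_launch(...)"
```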
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kitty/rc/launch.py b/kitty/rc/launch.py
--- a/kitty/rc/launch.py
+++ b/kitty/rc/launch.py
@@ -81,11 +81,14 @@
if val is None:
val = default_value
setattr(opts, key, val)
+ target_tab = None
tabs = self.tabs_for_match_payload(boss, window, payload_get)
if tabs and tabs[0]:
- w = do_launch(boss, opts, payload_get('args') or [], target_tab=tabs[0])
- return None if payload_get('no_response') else str(getattr(w, 'id', 0))
- return None
+ target_tab = tabs[0]
+ elif payload_get('type') not in ('os-window', 'background'):
+ return None
+ w = do_launch(boss, opts, payload_get('args') or [], target_tab=target_tab)
+ return None if payload_get('no_response') else str(getattr(w, 'id', 0))
launch = Launch()
| {"golden_diff": "diff --git a/kitty/rc/launch.py b/kitty/rc/launch.py\n--- a/kitty/rc/launch.py\n+++ b/kitty/rc/launch.py\n@@ -81,11 +81,14 @@\n if val is None:\n val = default_value\n setattr(opts, key, val)\n+ target_tab = None\n tabs = self.tabs_for_match_payload(boss, window, payload_get)\n if tabs and tabs[0]:\n- w = do_launch(boss, opts, payload_get('args') or [], target_tab=tabs[0])\n- return None if payload_get('no_response') else str(getattr(w, 'id', 0))\n- return None\n+ target_tab = tabs[0]\n+ elif payload_get('type') not in ('os-window', 'background'):\n+ return None\n+ w = do_launch(boss, opts, payload_get('args') or [], target_tab=target_tab)\n+ return None if payload_get('no_response') else str(getattr(w, 'id', 0))\n \n \n launch = Launch()\n", "issue": "MacOS: Launch OS Window via Remote Control \n**Describe the bug** \r\n\r\nRef: https://github.com/kovidgoyal/kitty/issues/45#issuecomment-915753960\r\n\r\nRemote control via socket doesn't work opening a new OS window unless there is an existing window open already. \r\n\r\n**To Reproduce** \r\n\r\nSteps to reproduce the behavior:\r\n1. Launch kitty without window:\r\n````\r\nkitty --config NONE --listen-on=unix:/tmp/scratch -o allow_remote_control=yes -o macos_quit_when_last_window_closed=no -1 --instance-group scratch false &\r\n````\r\n2. Attempt to open OS Window using remote control:\r\n````\r\nkitty @ --to unix:/tmp/scratch launch --type=os-window --title=test\r\n````\r\n3. No window opens up\r\n\r\n4. Right click icon -> Open OS Window\r\n\r\n5. Reattempt remote control:\r\n````\r\nkitty @ --to unix:/tmp/scratch launch --type=os-window --title=test\r\n````\r\n6. Window opens up fine with title \"test\"\r\n\r\n**Environment details**\r\n```\r\nkitty 0.25.2 created by Kovid Goyal\r\nDarwin gtd.lan 21.5.0 Darwin Kernel Version 21.5.0: Tue Apr 26 21:08:22 PDT 2022; root:xnu-8020.121.3~4/RELEASE_X86_64 x86_64\r\nProductName:\tmacOS ProductVersion:\t12.4 BuildVersion:\t21F79\r\nFrozen: True\r\nPaths:\r\n kitty: /Applications/kitty.app/Contents/MacOS/kitty\r\n base dir: /Applications/kitty.app/Contents/Resources/kitty\r\n extensions dir: /Applications/kitty.app/Contents/Resources/Python/lib/kitty-extensions\r\n system shell: /bin/zsh\r\nLoaded config overrides:\r\n allow_remote_control yes\r\n macos_quit_when_last_window_closed no\r\n\r\nConfig options different from defaults:\r\nallow_remote_control y\r\n\r\nImportant environment variables seen by the kitty process:\r\n\tPATH /usr/local/opt/coreutils/libexec/gnubin:/Users/hars/.config/bin:/Users/hars/.dwm/statusbar:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin:/usr/local/opt/coreutils/libexec/gnubin:/Users/hars/.config/bin:/Users/hars/.dwm/statusbar:/Applications/kitty.app/Contents/MacOS:/Users/hars/.local/share/sheldon/repos/github.com/kazhala/dotbare:/usr/local/opt/fzf/bin\r\n\tLANG en_AU.UTF-8\r\n\tVISUAL nvim\r\n\tEDITOR nvim\r\n\tSHELL /bin/zsh\r\n\tUSER hars\r\n\tXDG_CONFIG_HOME /Users/hars/.config\r\n\tXDG_CACHE_HOME /Users/hars/.cache\r\n\r\n```\r\n**Additional context**\r\n\r\nAlso tried ``new-window --window-type=os``\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# License: GPLv3 Copyright: 2020, Kovid Goyal <kovid at kovidgoyal.net>\n\n\nfrom typing import TYPE_CHECKING, Optional\n\nfrom kitty.cli_stub import LaunchCLIOptions\nfrom kitty.launch import (\n launch as do_launch, options_spec as launch_options_spec,\n parse_launch_args\n)\n\nfrom .base import (\n MATCH_TAB_OPTION, ArgsType, Boss, PayloadGetType, PayloadType, 
RCOptions,\n RemoteCommand, ResponseType, Window\n)\n\nif TYPE_CHECKING:\n from kitty.cli_stub import LaunchRCOptions as CLIOptions\n\n\nclass Launch(RemoteCommand):\n\n '''\n args+: The command line to run in the new window, as a list, use an empty list to run the default shell\n match: The tab to open the new window in\n window_title: Title for the new window\n cwd: Working directory for the new window\n env: List of environment variables of the form NAME=VALUE\n tab_title: Title for the new tab\n type: The type of window to open\n keep_focus: Boolean indicating whether the current window should retain focus or not\n copy_colors: Boolean indicating whether to copy the colors from the current window\n copy_cmdline: Boolean indicating whether to copy the cmdline from the current window\n copy_env: Boolean indicating whether to copy the environ from the current window\n hold: Boolean indicating whether to keep window open after cmd exits\n location: Where in the tab to open the new window\n allow_remote_control: Boolean indicating whether to allow remote control from the new window\n stdin_source: Where to get stdin for thew process from\n stdin_add_formatting: Boolean indicating whether to add formatting codes to stdin\n stdin_add_line_wrap_markers: Boolean indicating whether to add line wrap markers to stdin\n no_response: Boolean indicating whether to send back the window id\n marker: Specification for marker for new window, for example: \"text 1 ERROR\"\n logo: Path to window logo\n logo_position: Window logo position as string or empty string to use default\n logo_alpha: Window logo alpha or -1 to use default\n self: Boolean, if True use tab the command was run in\n '''\n\n short_desc = 'Run an arbitrary process in a new window/tab'\n desc = (\n 'Prints out the id of the newly opened window. Any command line arguments'\n ' are assumed to be the command line used to run in the new window, if none'\n ' are provided, the default shell is run. For example:'\n ' :code:`kitty @ launch --title=Email mutt`.'\n )\n options_spec = MATCH_TAB_OPTION + '\\n\\n' + '''\\\n--no-response\ntype=bool-set\nDo not print out the id of the newly created window.\n\n\n--self\ntype=bool-set\nIf specified the tab containing the window this command is run in is used\ninstead of the active tab\n ''' + '\\n\\n' + launch_options_spec().replace(':option:`launch', ':option:`kitty @ launch')\n argspec = '[CMD ...]'\n\n def message_to_kitty(self, global_opts: RCOptions, opts: 'CLIOptions', args: ArgsType) -> PayloadType:\n ans = {'args': args or []}\n for attr, val in opts.__dict__.items():\n ans[attr] = val\n return ans\n\n def response_from_kitty(self, boss: Boss, window: Optional[Window], payload_get: PayloadGetType) -> ResponseType:\n default_opts = parse_launch_args()[0]\n opts = LaunchCLIOptions()\n for key, default_value in default_opts.__dict__.items():\n val = payload_get(key)\n if val is None:\n val = default_value\n setattr(opts, key, val)\n tabs = self.tabs_for_match_payload(boss, window, payload_get)\n if tabs and tabs[0]:\n w = do_launch(boss, opts, payload_get('args') or [], target_tab=tabs[0])\n return None if payload_get('no_response') else str(getattr(w, 'id', 0))\n return None\n\n\nlaunch = Launch()\n", "path": "kitty/rc/launch.py"}]} | 2,265 | 236 |
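The golden diff above relaxes that guard. A condensed sketch of the patched decision, with a hypothetical helper name but the same logic:

```python
def pick_target_tab(tabs, window_type):
    # After the patch: os-window and background launches no longer need an
    # existing tab; other window types still bail out without one.
    if tabs and tabs[0]:
        return tabs[0], True
    if window_type in ("os-window", "background"):
        return None, True   # do_launch() can create a fresh OS window
    return None, False

assert pick_target_tab([], "os-window") == (None, True)
assert pick_target_tab([], "window") == (None, False)
```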