index
int64 0
10k
| blob_id
stringlengths 40
40
| code
stringlengths 13
1.2M
| steps
listlengths 1
578
| error
bool 2
classes |
---|---|---|---|---|
9,700 |
60b70171dededd758e00d6446842355a47b54cc0
|
#!/usr/bin/env python3
import sys
import collections as cl
def II(): return int(sys.stdin.readline())
def MI(): return map(int, sys.stdin.readline().split())
def LI(): return list(map(int, sys.stdin.readline().split()))
MOD = 998244353
def main():
N, K = MI()
kukan = []
for _ in range(K):
tmp = LI()
kukan.append(tmp)
dp = [0] * (N + 1)
dp[1] = 1
dp_sum = [0] * (N+1)
dp_sum[1] = 1
for i in range(N+1):
for k in range(K):
l, r = kukan[k]
pre_l = i - r
pre_r = i - l
if pre_r < 0:
continue
pre_l = max(pre_l, 0)
dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]
dp_sum[i] = dp[i] + dp_sum[i-1]
dp_sum[i] %= MOD
dp[i] %= MOD
print(dp[-1])
main()
|
[
"#!/usr/bin/env python3\nimport sys\nimport collections as cl\n\n\ndef II(): return int(sys.stdin.readline())\n\n\ndef MI(): return map(int, sys.stdin.readline().split())\n\n\ndef LI(): return list(map(int, sys.stdin.readline().split()))\n\n\nMOD = 998244353\n\n\ndef main():\n N, K = MI()\n\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N+1)\n dp_sum[1] = 1\n\n for i in range(N+1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n\n dp_sum[i] = dp[i] + dp_sum[i-1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n\n print(dp[-1])\n\n\nmain()\n",
"import sys\nimport collections as cl\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\nMOD = 998244353\n\n\ndef main():\n N, K = MI()\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N + 1)\n dp_sum[1] = 1\n for i in range(N + 1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n dp_sum[i] = dp[i] + dp_sum[i - 1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n print(dp[-1])\n\n\nmain()\n",
"<import token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\nMOD = 998244353\n\n\ndef main():\n N, K = MI()\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N + 1)\n dp_sum[1] = 1\n for i in range(N + 1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n dp_sum[i] = dp[i] + dp_sum[i - 1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n print(dp[-1])\n\n\nmain()\n",
"<import token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\n<assignment token>\n\n\ndef main():\n N, K = MI()\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N + 1)\n dp_sum[1] = 1\n for i in range(N + 1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n dp_sum[i] = dp[i] + dp_sum[i - 1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n print(dp[-1])\n\n\nmain()\n",
"<import token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\n<assignment token>\n\n\ndef main():\n N, K = MI()\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N + 1)\n dp_sum[1] = 1\n for i in range(N + 1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n dp_sum[i] = dp[i] + dp_sum[i - 1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n print(dp[-1])\n\n\n<code token>\n",
"<import token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\n<assignment token>\n<function token>\n<code token>\n",
"<import token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\n<function token>\n<assignment token>\n<function token>\n<code token>\n",
"<import token>\n<function token>\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\n<function token>\n<assignment token>\n<function token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<function token>\n<code token>\n"
] | false |
9,701 |
51868f26599c5878f8eb976d928c30d0bf61547d
|
import collections
def range(state):
ran = state["tmp"]["analysis"]["range"]
rang = {
key : [ state["rank"][i] for i in val & ran ]
for key, val in state["tmp"]["analysis"]["keys"].items()
if val & ran
}
for item in state["tmp"]["items"]:
item.setdefault("rank", 0)
item_keys = set(item.keys())
rang_keys = set(rang.keys())
keys = item_keys & rang_keys
for key in keys:
val = item[key]
ruls = rang[key]
for rule in ruls:
item["rank"] += _rank(val, rule)
def _rank(val, rule):
if "rank" not in rule or "val" not in rule:
return 0
if isinstance(val, dict):
return sum([ _rank(val, rule) for val in val.values() ])
if isinstance(val, collections.Iterable):
return sum([ _rank(val, rule) for val in val ])
if "from" in rule["val"] and "to" in rule["val"]:
return rule["rank"] if rule["val"]["from"] < val < rule["val"]["to"] else 0
if "from" in rule["val"]:
return rule["rank"] if rule["val"]["from"] < val else 0
if "to" in rule["val"]:
return rule["rank"] if val < rule["val"]["to"] else 0
return 0
|
[
"import collections\n\ndef range(state):\n\tran = state[\"tmp\"][\"analysis\"][\"range\"]\n\n\trang = {\n\t\tkey : [ state[\"rank\"][i] for i in val & ran ]\n\t\tfor key, val in state[\"tmp\"][\"analysis\"][\"keys\"].items()\n\t\tif val & ran\n\t}\n\n\tfor item in state[\"tmp\"][\"items\"]:\n\t\titem.setdefault(\"rank\", 0)\n\n\t\titem_keys = set(item.keys())\n\t\trang_keys = set(rang.keys())\n\t\tkeys = item_keys & rang_keys\n\n\t\tfor key in keys:\n\t\t\tval = item[key]\n\t\t\truls = rang[key]\n\n\t\t\tfor rule in ruls:\n\t\t\t\titem[\"rank\"] += _rank(val, rule)\n\ndef _rank(val, rule):\n\tif \"rank\" not in rule or \"val\" not in rule:\n\t\treturn 0\n\n\tif isinstance(val, dict):\n\t\treturn sum([ _rank(val, rule) for val in val.values() ])\n\n\tif isinstance(val, collections.Iterable):\n\t\treturn sum([ _rank(val, rule) for val in val ])\n\n\tif \"from\" in rule[\"val\"] and \"to\" in rule[\"val\"]:\n\t\treturn rule[\"rank\"] if rule[\"val\"][\"from\"] < val < rule[\"val\"][\"to\"] else 0\n\n\tif \"from\" in rule[\"val\"]:\n\t\treturn rule[\"rank\"] if rule[\"val\"][\"from\"] < val else 0\n\n\tif \"to\" in rule[\"val\"]:\n\t\treturn rule[\"rank\"] if val < rule[\"val\"][\"to\"] else 0\n\n\treturn 0\n",
"import collections\n\n\ndef range(state):\n ran = state['tmp']['analysis']['range']\n rang = {key: [state['rank'][i] for i in val & ran] for key, val in\n state['tmp']['analysis']['keys'].items() if val & ran}\n for item in state['tmp']['items']:\n item.setdefault('rank', 0)\n item_keys = set(item.keys())\n rang_keys = set(rang.keys())\n keys = item_keys & rang_keys\n for key in keys:\n val = item[key]\n ruls = rang[key]\n for rule in ruls:\n item['rank'] += _rank(val, rule)\n\n\ndef _rank(val, rule):\n if 'rank' not in rule or 'val' not in rule:\n return 0\n if isinstance(val, dict):\n return sum([_rank(val, rule) for val in val.values()])\n if isinstance(val, collections.Iterable):\n return sum([_rank(val, rule) for val in val])\n if 'from' in rule['val'] and 'to' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val < rule['val']['to'\n ] else 0\n if 'from' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val else 0\n if 'to' in rule['val']:\n return rule['rank'] if val < rule['val']['to'] else 0\n return 0\n",
"<import token>\n\n\ndef range(state):\n ran = state['tmp']['analysis']['range']\n rang = {key: [state['rank'][i] for i in val & ran] for key, val in\n state['tmp']['analysis']['keys'].items() if val & ran}\n for item in state['tmp']['items']:\n item.setdefault('rank', 0)\n item_keys = set(item.keys())\n rang_keys = set(rang.keys())\n keys = item_keys & rang_keys\n for key in keys:\n val = item[key]\n ruls = rang[key]\n for rule in ruls:\n item['rank'] += _rank(val, rule)\n\n\ndef _rank(val, rule):\n if 'rank' not in rule or 'val' not in rule:\n return 0\n if isinstance(val, dict):\n return sum([_rank(val, rule) for val in val.values()])\n if isinstance(val, collections.Iterable):\n return sum([_rank(val, rule) for val in val])\n if 'from' in rule['val'] and 'to' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val < rule['val']['to'\n ] else 0\n if 'from' in rule['val']:\n return rule['rank'] if rule['val']['from'] < val else 0\n if 'to' in rule['val']:\n return rule['rank'] if val < rule['val']['to'] else 0\n return 0\n",
"<import token>\n\n\ndef range(state):\n ran = state['tmp']['analysis']['range']\n rang = {key: [state['rank'][i] for i in val & ran] for key, val in\n state['tmp']['analysis']['keys'].items() if val & ran}\n for item in state['tmp']['items']:\n item.setdefault('rank', 0)\n item_keys = set(item.keys())\n rang_keys = set(rang.keys())\n keys = item_keys & rang_keys\n for key in keys:\n val = item[key]\n ruls = rang[key]\n for rule in ruls:\n item['rank'] += _rank(val, rule)\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n"
] | false |
9,702 |
986df5a41bc87ecb390dfbd1db9e1f5cd6c5b8fb
|
import argparse
import cv2
import numpy as np
refPt = []
cropping = False
def click_and_crop(event, x, y, flags, param):
global refPt, cropping
if event == cv2.EVENT_LBUTTONDOWN:
refPt = [(x, y)]
cropping = True
elif event == cv2.EVENT_LBUTTONUP:
refPt.append((x, y))
cropping = False
cv2.rectangle(image, refPt[0], refPt[1], (0, 255, 0), 2)
cv2.imshow("image", image)
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="Path to the image")
args = vars(ap.parse_args())
image = cv2.imread(args["image"])
clone = image.copy()
cv2.namedWindow("image")
cv2.setMouseCallback("image", click_and_crop)
while True:
cv2.imshow("image", image)
key = cv2.waitKey(1) & 0xFF
if key == ord("r"):
image = clone.copy()
elif key == ord("c"):
break
if len(refPt) == 2:
roi = clone[refPt[0][1]:refPt[1][1], refPt[0][0]:refPt[1][0]]
cv2.imshow("ROI", roi)
count=0
sum=np.array([0,0,0])
for i in range (0,np.size(roi,0)):
for j in range(0,np.size(roi,1)):
count+=1
sum+=roi[i,j]
print "Average bgr: ",sum/count
cv2.waitKey(0)
cv2.destroyAllWindows()
|
[
"\nimport argparse\nimport cv2\nimport numpy as np\n \n\nrefPt = []\ncropping = False\n \ndef click_and_crop(event, x, y, flags, param):\n\tglobal refPt, cropping\n \n\tif event == cv2.EVENT_LBUTTONDOWN:\n\t\trefPt = [(x, y)]\n\t\tcropping = True\n \n\telif event == cv2.EVENT_LBUTTONUP:\n\t\trefPt.append((x, y))\n\t\tcropping = False\n \n\t\n\t\tcv2.rectangle(image, refPt[0], refPt[1], (0, 255, 0), 2)\n\t\tcv2.imshow(\"image\", image)\n\n\nap = argparse.ArgumentParser()\nap.add_argument(\"-i\", \"--image\", required=True, help=\"Path to the image\")\nargs = vars(ap.parse_args())\n \nimage = cv2.imread(args[\"image\"])\nclone = image.copy()\ncv2.namedWindow(\"image\")\ncv2.setMouseCallback(\"image\", click_and_crop)\n \n\nwhile True:\n\tcv2.imshow(\"image\", image)\n\tkey = cv2.waitKey(1) & 0xFF\n \n\n\tif key == ord(\"r\"):\n\t\timage = clone.copy()\n \n\telif key == ord(\"c\"):\n\t\tbreak\n \n\nif len(refPt) == 2:\n\troi = clone[refPt[0][1]:refPt[1][1], refPt[0][0]:refPt[1][0]]\n\tcv2.imshow(\"ROI\", roi)\n\tcount=0\n\tsum=np.array([0,0,0])\n\tfor i in range (0,np.size(roi,0)):\n\t\tfor j in range(0,np.size(roi,1)):\n\t\t\tcount+=1\n\t\t\tsum+=roi[i,j]\n\tprint \"Average bgr: \",sum/count\n\tcv2.waitKey(0)\n \n\ncv2.destroyAllWindows()"
] | true |
9,703 |
dd0e96a1f93cbffedc11262a883dda285f5c224c
|
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import func
from extensions import bcrypt
db = SQLAlchemy()
class User(db.Model):
id = db.Column(db.Integer(), primary_key=True)
username = db.Column(db.String(255))
password = db.Column(db.String(255))
posts = db.relationship(
'Post',
backref='user',
lazy='dynamic'
)
def __init__(self, username):
self.username = username
def set_password(self,password):
self.password = bcrypt.generate_password_hash(password)
def check_password(self,password):
return bcrypt.check_password_hash(self.password,password)
def __repr__(self):
return '<User ' + self.username + '>'
tags = db.Table(
'post_tags',
db.Column('post_id', db.Integer(), db.ForeignKey('post.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id'))
)
class Post(db.Model):
id = db.Column(db.Integer(), primary_key=True)
title = db.Column(db.String(255))
text = db.Column(db.Text())
date = db.Column(db.DateTime())
user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))
comments = db.relationship(
'Comment',
backref='post',
lazy='dynamic'
)
tags = db.relationship(
'Tag',
secondary=tags,
backref=db.backref(
'posts',
lazy='dynamic'
)
)
def __init__(self, title):
self.title = title
def __repr__(self):
return '<Post ' + self.title + '>'
class Comment(db.Model):
id = db.Column(db.Integer(), primary_key=True)
title = db.Column(db.String(255))
text = db.Column(db.Text())
date = db.Column(db.DateTime())
post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))
def __init__(self, title):
self.title = title
def __repr__(self):
return '<Comment ' + self.title + '>'
class Tag(db.Model):
id = db.Column(db.Integer(), primary_key=True)
title = db.Column(db.String(255))
def __init__(self, title):
self.title = title
def __repr__(self):
return '<Tag ' + self.title + '>'
def sidebar_data():
recent = Post.query.order_by(
Post.date.desc()
).limit(5).all()
top_tags = db.session.query(
Tag, func.count(tags.c.post_id).label('total')
).join(
tags
).group_by(Tag).order_by('total DESC').limit(5).all()
return recent, top_tags
|
[
"from flask_sqlalchemy import SQLAlchemy\nfrom sqlalchemy import func\nfrom extensions import bcrypt\n\ndb = SQLAlchemy()\n\n\nclass User(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n username = db.Column(db.String(255))\n password = db.Column(db.String(255))\n posts = db.relationship(\n 'Post',\n backref='user',\n lazy='dynamic'\n )\n\n def __init__(self, username):\n self.username = username\n\n def set_password(self,password):\n self.password = bcrypt.generate_password_hash(password)\n\n def check_password(self,password):\n return bcrypt.check_password_hash(self.password,password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\ntags = db.Table(\n 'post_tags',\n db.Column('post_id', db.Integer(), db.ForeignKey('post.id')),\n db.Column('tag_id', db.Integer, db.ForeignKey('tag.id'))\n)\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship(\n 'Comment',\n backref='post',\n lazy='dynamic'\n )\n tags = db.relationship(\n 'Tag',\n secondary=tags,\n backref=db.backref(\n 'posts',\n lazy='dynamic'\n )\n )\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\ndef sidebar_data():\n recent = 
Post.query.order_by(\n Post.date.desc()\n ).limit(5).all()\n top_tags = db.session.query(\n Tag, func.count(tags.c.post_id).label('total')\n ).join(\n tags\n ).group_by(Tag).order_by('total DESC').limit(5).all()\n return recent, top_tags",
"from flask_sqlalchemy import SQLAlchemy\nfrom sqlalchemy import func\nfrom extensions import bcrypt\ndb = SQLAlchemy()\n\n\nclass User(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n username = db.Column(db.String(255))\n password = db.Column(db.String(255))\n posts = db.relationship('Post', backref='user', lazy='dynamic')\n\n def __init__(self, username):\n self.username = username\n\n def set_password(self, password):\n self.password = bcrypt.generate_password_hash(password)\n\n def check_password(self, password):\n return bcrypt.check_password_hash(self.password, password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\ntags = db.Table('post_tags', db.Column('post_id', db.Integer(), db.\n ForeignKey('post.id')), db.Column('tag_id', db.Integer, db.ForeignKey(\n 'tag.id')))\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\ndef sidebar_data():\n recent = 
Post.query.order_by(Post.date.desc()).limit(5).all()\n top_tags = db.session.query(Tag, func.count(tags.c.post_id).label('total')\n ).join(tags).group_by(Tag).order_by('total DESC').limit(5).all()\n return recent, top_tags\n",
"<import token>\ndb = SQLAlchemy()\n\n\nclass User(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n username = db.Column(db.String(255))\n password = db.Column(db.String(255))\n posts = db.relationship('Post', backref='user', lazy='dynamic')\n\n def __init__(self, username):\n self.username = username\n\n def set_password(self, password):\n self.password = bcrypt.generate_password_hash(password)\n\n def check_password(self, password):\n return bcrypt.check_password_hash(self.password, password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\ntags = db.Table('post_tags', db.Column('post_id', db.Integer(), db.\n ForeignKey('post.id')), db.Column('tag_id', db.Integer, db.ForeignKey(\n 'tag.id')))\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\ndef sidebar_data():\n recent = Post.query.order_by(Post.date.desc()).limit(5).all()\n top_tags = db.session.query(Tag, 
func.count(tags.c.post_id).label('total')\n ).join(tags).group_by(Tag).order_by('total DESC').limit(5).all()\n return recent, top_tags\n",
"<import token>\n<assignment token>\n\n\nclass User(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n username = db.Column(db.String(255))\n password = db.Column(db.String(255))\n posts = db.relationship('Post', backref='user', lazy='dynamic')\n\n def __init__(self, username):\n self.username = username\n\n def set_password(self, password):\n self.password = bcrypt.generate_password_hash(password)\n\n def check_password(self, password):\n return bcrypt.check_password_hash(self.password, password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\ndef sidebar_data():\n recent = Post.query.order_by(Post.date.desc()).limit(5).all()\n top_tags = db.session.query(Tag, func.count(tags.c.post_id).label('total')\n ).join(tags).group_by(Tag).order_by('total DESC').limit(5).all()\n return recent, top_tags\n",
"<import token>\n<assignment token>\n\n\nclass User(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n username = db.Column(db.String(255))\n password = db.Column(db.String(255))\n posts = db.relationship('Post', backref='user', lazy='dynamic')\n\n def __init__(self, username):\n self.username = username\n\n def set_password(self, password):\n self.password = bcrypt.generate_password_hash(password)\n\n def check_password(self, password):\n return bcrypt.check_password_hash(self.password, password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass User(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, username):\n self.username = username\n\n def set_password(self, password):\n self.password = bcrypt.generate_password_hash(password)\n\n def check_password(self, password):\n return bcrypt.check_password_hash(self.password, password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass User(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def set_password(self, password):\n self.password = bcrypt.generate_password_hash(password)\n\n def check_password(self, password):\n return bcrypt.check_password_hash(self.password, password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass User(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n def check_password(self, password):\n return bcrypt.check_password_hash(self.password, password)\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass User(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def __repr__(self):\n return '<User ' + self.username + '>'\n\n\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\nclass User(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n\n\nclass Post(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))\n comments = db.relationship('Comment', backref='post', lazy='dynamic')\n tags = db.relationship('Tag', secondary=tags, backref=db.backref(\n 'posts', lazy='dynamic'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n\n\nclass Post(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Post ' + self.title + '>'\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n\n\nclass Post(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, title):\n self.title = title\n <function token>\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n\n\nclass Post(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n\n\nclass Comment(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n text = db.Column(db.Text())\n date = db.Column(db.DateTime())\n post_id = db.Column(db.Integer(), db.ForeignKey('post.id'))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n\n\nclass Comment(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Comment ' + self.title + '>'\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n\n\nclass Comment(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __init__(self, title):\n self.title = title\n <function token>\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n\n\nclass Comment(db.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer(), primary_key=True)\n title = db.Column(db.String(255))\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Tag(db.Model):\n <assignment token>\n <assignment token>\n\n def __init__(self, title):\n self.title = title\n\n def __repr__(self):\n return '<Tag ' + self.title + '>'\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Tag(db.Model):\n <assignment token>\n <assignment token>\n\n def __init__(self, title):\n self.title = title\n <function token>\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Tag(db.Model):\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n<function token>\n"
] | false |
9,704 |
78c4e14e5afdf857082b60bf4020f0f785d93a0d
|
from p5 import *
import numpy as np
from numpy.random import default_rng
from boids import Boid
from data import Data
n=30;
width = 1920
height = 1080
flock=[]
infected=[]
rng = default_rng()
frames=0
for i in range(n):
x = rng.integers(low=0, high=1920)
y = rng.integers(low=0, high=1080)
if i==0:
flock.append(Boid(x,y, width, height,infected=True,curado=False,alive=True))
else:
flock.append(Boid(x,y, width, height,infected=False,curado=False,alive=True))
def setup():
#this happens just once
size(width, height) #instead of create_canvas
def draw():
global flock,frames
background(30, 30, 47)
for boid in flock:
boid.edges()
boid.apply_behaviour(flock)
boid.infection(flock)
boid.update()
boid.show()
boid.livesordie()
Data.count(flock)
run()
|
[
"from p5 import *\nimport numpy as np\nfrom numpy.random import default_rng\nfrom boids import Boid\nfrom data import Data\nn=30;\nwidth = 1920\nheight = 1080\nflock=[]\ninfected=[]\nrng = default_rng()\nframes=0\n\nfor i in range(n):\n x = rng.integers(low=0, high=1920)\n y = rng.integers(low=0, high=1080)\n\n if i==0:\n flock.append(Boid(x,y, width, height,infected=True,curado=False,alive=True))\n else:\n flock.append(Boid(x,y, width, height,infected=False,curado=False,alive=True))\n\ndef setup():\n #this happens just once\n size(width, height) #instead of create_canvas\n\n\ndef draw():\n global flock,frames\n \n background(30, 30, 47)\n\n\n for boid in flock:\n boid.edges()\n boid.apply_behaviour(flock)\n boid.infection(flock)\n boid.update() \n boid.show()\n boid.livesordie()\n Data.count(flock)\n \n\nrun()",
"from p5 import *\nimport numpy as np\nfrom numpy.random import default_rng\nfrom boids import Boid\nfrom data import Data\nn = 30\nwidth = 1920\nheight = 1080\nflock = []\ninfected = []\nrng = default_rng()\nframes = 0\nfor i in range(n):\n x = rng.integers(low=0, high=1920)\n y = rng.integers(low=0, high=1080)\n if i == 0:\n flock.append(Boid(x, y, width, height, infected=True, curado=False,\n alive=True))\n else:\n flock.append(Boid(x, y, width, height, infected=False, curado=False,\n alive=True))\n\n\ndef setup():\n size(width, height)\n\n\ndef draw():\n global flock, frames\n background(30, 30, 47)\n for boid in flock:\n boid.edges()\n boid.apply_behaviour(flock)\n boid.infection(flock)\n boid.update()\n boid.show()\n boid.livesordie()\n Data.count(flock)\n\n\nrun()\n",
"<import token>\nn = 30\nwidth = 1920\nheight = 1080\nflock = []\ninfected = []\nrng = default_rng()\nframes = 0\nfor i in range(n):\n x = rng.integers(low=0, high=1920)\n y = rng.integers(low=0, high=1080)\n if i == 0:\n flock.append(Boid(x, y, width, height, infected=True, curado=False,\n alive=True))\n else:\n flock.append(Boid(x, y, width, height, infected=False, curado=False,\n alive=True))\n\n\ndef setup():\n size(width, height)\n\n\ndef draw():\n global flock, frames\n background(30, 30, 47)\n for boid in flock:\n boid.edges()\n boid.apply_behaviour(flock)\n boid.infection(flock)\n boid.update()\n boid.show()\n boid.livesordie()\n Data.count(flock)\n\n\nrun()\n",
"<import token>\n<assignment token>\nfor i in range(n):\n x = rng.integers(low=0, high=1920)\n y = rng.integers(low=0, high=1080)\n if i == 0:\n flock.append(Boid(x, y, width, height, infected=True, curado=False,\n alive=True))\n else:\n flock.append(Boid(x, y, width, height, infected=False, curado=False,\n alive=True))\n\n\ndef setup():\n size(width, height)\n\n\ndef draw():\n global flock, frames\n background(30, 30, 47)\n for boid in flock:\n boid.edges()\n boid.apply_behaviour(flock)\n boid.infection(flock)\n boid.update()\n boid.show()\n boid.livesordie()\n Data.count(flock)\n\n\nrun()\n",
"<import token>\n<assignment token>\n<code token>\n\n\ndef setup():\n size(width, height)\n\n\ndef draw():\n global flock, frames\n background(30, 30, 47)\n for boid in flock:\n boid.edges()\n boid.apply_behaviour(flock)\n boid.infection(flock)\n boid.update()\n boid.show()\n boid.livesordie()\n Data.count(flock)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<function token>\n\n\ndef draw():\n global flock, frames\n background(30, 30, 47)\n for boid in flock:\n boid.edges()\n boid.apply_behaviour(flock)\n boid.infection(flock)\n boid.update()\n boid.show()\n boid.livesordie()\n Data.count(flock)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,705 |
7764effac0b95ad8f62b91dd470c1d0e40704a7d
|
''' tk_image_view_url_io.py
display an image from a URL using Tkinter, PIL and data_stream
tested with Python27 and Python33 by vegaseat 01mar2013
'''
import io
# allows for image formats other than gif
from PIL import Image, ImageTk
try:
# Python2
import Tkinter as tk
from urllib2 import urlopen
except ImportError:
# Python3
import tkinter as tk
from urllib.request import urlopen
root = tk.Tk()
# find yourself a picture on an internet web page you like
# (right click on the picture, under properties copy the address)
#url = "http://www.google.com/intl/en/images/logo.gif"
# or use image previously downloaded to tinypic.com
#url = "http://i48.tinypic.com/w6sjn6.jpg"
#url = "http://i50.tinypic.com/34g8vo5.jpg"
#url = "https://media.geeksforgeeks.org/wp-content/uploads/Computer-Networking-Diagram.png"
url = "https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg"
image_bytes = urlopen(url).read()
# internal data file
data_stream = io.BytesIO(image_bytes)
# open as a PIL image object
pil_image = Image.open(data_stream)
# optionally show image info
# get the size of the image
w, h = pil_image.size
# split off image file name
fname = url.split('/')[-1]
sf = "{} ({}x{})".format(fname, w, h)
root.title(sf)
# convert PIL image object to Tkinter PhotoImage object
tk_image = ImageTk.PhotoImage(pil_image)
# put the image on a typical widget
label = tk.Label(root, image=tk_image, bg='brown')
label.pack(padx=5, pady=5)
root.mainloop()
|
[
"''' tk_image_view_url_io.py\ndisplay an image from a URL using Tkinter, PIL and data_stream\ntested with Python27 and Python33 by vegaseat 01mar2013\n'''\n\nimport io\n# allows for image formats other than gif\nfrom PIL import Image, ImageTk\ntry:\n # Python2\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n # Python3\n import tkinter as tk\n from urllib.request import urlopen\n\nroot = tk.Tk()\n\n# find yourself a picture on an internet web page you like\n# (right click on the picture, under properties copy the address)\n#url = \"http://www.google.com/intl/en/images/logo.gif\"\n# or use image previously downloaded to tinypic.com\n#url = \"http://i48.tinypic.com/w6sjn6.jpg\"\n#url = \"http://i50.tinypic.com/34g8vo5.jpg\"\n#url = \"https://media.geeksforgeeks.org/wp-content/uploads/Computer-Networking-Diagram.png\"\nurl = \"https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg\"\nimage_bytes = urlopen(url).read()\n# internal data file\ndata_stream = io.BytesIO(image_bytes)\n# open as a PIL image object\npil_image = Image.open(data_stream)\n\n# optionally show image info\n# get the size of the image\nw, h = pil_image.size\n# split off image file name\nfname = url.split('/')[-1]\nsf = \"{} ({}x{})\".format(fname, w, h)\nroot.title(sf)\n\n# convert PIL image object to Tkinter PhotoImage object\ntk_image = ImageTk.PhotoImage(pil_image)\n\n# put the image on a typical widget\nlabel = tk.Label(root, image=tk_image, bg='brown')\nlabel.pack(padx=5, pady=5)\n\nroot.mainloop()\n",
"<docstring token>\nimport io\nfrom PIL import Image, ImageTk\ntry:\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n import tkinter as tk\n from urllib.request import urlopen\nroot = tk.Tk()\nurl = (\n 'https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg'\n )\nimage_bytes = urlopen(url).read()\ndata_stream = io.BytesIO(image_bytes)\npil_image = Image.open(data_stream)\nw, h = pil_image.size\nfname = url.split('/')[-1]\nsf = '{} ({}x{})'.format(fname, w, h)\nroot.title(sf)\ntk_image = ImageTk.PhotoImage(pil_image)\nlabel = tk.Label(root, image=tk_image, bg='brown')\nlabel.pack(padx=5, pady=5)\nroot.mainloop()\n",
"<docstring token>\n<import token>\ntry:\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n import tkinter as tk\n from urllib.request import urlopen\nroot = tk.Tk()\nurl = (\n 'https://static.toiimg.com/thumb/msid-79594506,imgsize-721231,width-400,resizemode-4/79594506.jpg'\n )\nimage_bytes = urlopen(url).read()\ndata_stream = io.BytesIO(image_bytes)\npil_image = Image.open(data_stream)\nw, h = pil_image.size\nfname = url.split('/')[-1]\nsf = '{} ({}x{})'.format(fname, w, h)\nroot.title(sf)\ntk_image = ImageTk.PhotoImage(pil_image)\nlabel = tk.Label(root, image=tk_image, bg='brown')\nlabel.pack(padx=5, pady=5)\nroot.mainloop()\n",
"<docstring token>\n<import token>\ntry:\n import Tkinter as tk\n from urllib2 import urlopen\nexcept ImportError:\n import tkinter as tk\n from urllib.request import urlopen\n<assignment token>\nroot.title(sf)\n<assignment token>\nlabel.pack(padx=5, pady=5)\nroot.mainloop()\n",
"<docstring token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,706 |
91806afea92587476ac743346b88098b197a033c
|
import pygame
import time
from menus import MainMenu
from scenes import TestWorldGen
from scenes import TestAnimation
from scenes import TestLevel2
from scenes import MainGame
import random
class GameManager:
def __init__(self):
self.screen = pygame.display.set_mode((1280, 720),
flags=pygame.FULLSCREEN |
pygame.HWSURFACE |
pygame.DOUBLEBUF) # type: pygame.Surface
self.running = True
self.delta_time = 1
self.active_scene = None
# self.load_scene(MainMenu.MainMenu, (self,))
# self.load_scene(TestWorldGen.TestWorldGen, (self,))
# self.load_scene(TestAnimation.TestAnimation, (self,))
# self.load_scene(TestLevel2.TestLevel, (self, ))
self.load_scene(MainGame.MainGame, (self,))
self.fps_font = pygame.font.Font("game_data/fonts/calling_code.ttf", 14)
self.pygame_clock = pygame.time.Clock() # type: pygame
self.pygame_clock.tick()
pygame.joystick.init()
self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.joystick.get_count())]
for joystick in self.joystick:
joystick.init()
random.seed(time.time())
self.player_joy = -1
def __del__(self):
self.exit()
def main_loop(self):
while self.running:
events = pygame.event.get()
for event in events:
if event.type == pygame.QUIT:
self.exit()
self.delta_time = float(self.pygame_clock.tick(60)) / (10 ** 3)
fps_text = self.fps_font.render("FPS: {}".format(round(1 / self.delta_time)), False, (255, 255, 255))
self.active_scene.main_loop(events)
self.screen.blit(fps_text, (self.screen.get_width() - fps_text.get_width(), 0))
pygame.display.flip()
def load_scene(self, scene_object, scene_parameters):
self.active_scene = scene_object(*scene_parameters)
def exit(self):
self.running = False
|
[
"import pygame\nimport time\nfrom menus import MainMenu\nfrom scenes import TestWorldGen\nfrom scenes import TestAnimation\nfrom scenes import TestLevel2\nfrom scenes import MainGame\nimport random\n\n\nclass GameManager:\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720),\n flags=pygame.FULLSCREEN |\n pygame.HWSURFACE |\n pygame.DOUBLEBUF) # type: pygame.Surface\n\n self.running = True\n\n self.delta_time = 1\n\n self.active_scene = None\n # self.load_scene(MainMenu.MainMenu, (self,))\n # self.load_scene(TestWorldGen.TestWorldGen, (self,))\n # self.load_scene(TestAnimation.TestAnimation, (self,))\n # self.load_scene(TestLevel2.TestLevel, (self, ))\n self.load_scene(MainGame.MainGame, (self,))\n\n self.fps_font = pygame.font.Font(\"game_data/fonts/calling_code.ttf\", 14)\n\n self.pygame_clock = pygame.time.Clock() # type: pygame\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n\n random.seed(time.time())\n\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n\n self.delta_time = float(self.pygame_clock.tick(60)) / (10 ** 3)\n\n fps_text = self.fps_font.render(\"FPS: {}\".format(round(1 / self.delta_time)), False, (255, 255, 255))\n\n self.active_scene.main_loop(events)\n\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.get_width(), 0))\n\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n\n def exit(self):\n self.running = False\n",
"import pygame\nimport time\nfrom menus import MainMenu\nfrom scenes import TestWorldGen\nfrom scenes import TestAnimation\nfrom scenes import TestLevel2\nfrom scenes import MainGame\nimport random\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n\n def exit(self):\n self.running = False\n",
"<import token>\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n\n def exit(self):\n self.running = False\n",
"<import token>\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n <function token>\n",
"<import token>\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n <function token>\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n <function token>\n",
"<import token>\n\n\nclass GameManager:\n <function token>\n <function token>\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n <function token>\n",
"<import token>\n\n\nclass GameManager:\n <function token>\n <function token>\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n <function token>\n <function token>\n",
"<import token>\n\n\nclass GameManager:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
9,707 |
4f06d87ec79c20206ff45ba72ab77844076be553
|
import pandas as pd
from greyatomlib.pandas_project.q01_read_csv_data_to_df.build import read_csv_data_to_df
def get_runs_counts_by_match():
ipl_df = read_csv_data_to_df("data/ipl_dataset.csv")
df1 = pd.DataFrame(ipl_df[['match_code','runs','venue']])
df2 = df1.groupby(['match_code','runs'], as_index=False).count()
df = df2.pivot(index='match_code',columns='runs')
df = df.fillna(0)
df = df.astype('int')
return df
get_runs_counts_by_match()
|
[
"\nimport pandas as pd\nfrom greyatomlib.pandas_project.q01_read_csv_data_to_df.build import read_csv_data_to_df\n\ndef get_runs_counts_by_match():\n ipl_df = read_csv_data_to_df(\"data/ipl_dataset.csv\")\n df1 = pd.DataFrame(ipl_df[['match_code','runs','venue']])\n df2 = df1.groupby(['match_code','runs'], as_index=False).count()\n df = df2.pivot(index='match_code',columns='runs')\n df = df.fillna(0)\n df = df.astype('int')\n return df\n\nget_runs_counts_by_match()\n",
"import pandas as pd\nfrom greyatomlib.pandas_project.q01_read_csv_data_to_df.build import read_csv_data_to_df\n\n\ndef get_runs_counts_by_match():\n ipl_df = read_csv_data_to_df('data/ipl_dataset.csv')\n df1 = pd.DataFrame(ipl_df[['match_code', 'runs', 'venue']])\n df2 = df1.groupby(['match_code', 'runs'], as_index=False).count()\n df = df2.pivot(index='match_code', columns='runs')\n df = df.fillna(0)\n df = df.astype('int')\n return df\n\n\nget_runs_counts_by_match()\n",
"<import token>\n\n\ndef get_runs_counts_by_match():\n ipl_df = read_csv_data_to_df('data/ipl_dataset.csv')\n df1 = pd.DataFrame(ipl_df[['match_code', 'runs', 'venue']])\n df2 = df1.groupby(['match_code', 'runs'], as_index=False).count()\n df = df2.pivot(index='match_code', columns='runs')\n df = df.fillna(0)\n df = df.astype('int')\n return df\n\n\nget_runs_counts_by_match()\n",
"<import token>\n\n\ndef get_runs_counts_by_match():\n ipl_df = read_csv_data_to_df('data/ipl_dataset.csv')\n df1 = pd.DataFrame(ipl_df[['match_code', 'runs', 'venue']])\n df2 = df1.groupby(['match_code', 'runs'], as_index=False).count()\n df = df2.pivot(index='match_code', columns='runs')\n df = df.fillna(0)\n df = df.astype('int')\n return df\n\n\n<code token>\n",
"<import token>\n<function token>\n<code token>\n"
] | false |
9,708 |
4b78c99dd6156afe960effcacb25804446310f7c
|
# MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class VirtualChassis(Base):
    """Entry point for inspecting and configuring a Virtual Chassis topology.

    Exposes the discovered appliances, hypervisors and IxVM cards that make up
    the chassis, plus its configurable attributes. The underlying required
    virtualChassis resource is fetched from the server each time a property
    is accessed.
    """

    __slots__ = ()
    _SDM_NAME = 'virtualChassis'

    def __init__(self, parent):
        super(VirtualChassis, self).__init__(parent)

    @property
    def DiscoveredAppliance(self):
        """Child accessor for the DiscoveredAppliance resource collection.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid a circular import at module load time.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance
        return DiscoveredAppliance(self)

    @property
    def Hypervisor(self):
        """Child accessor for the Hypervisor resource collection.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid a circular import at module load time.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor
        return Hypervisor(self)

    @property
    def IxVmCard(self):
        """Child accessor for the IxVmCard resource collection.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard.IxVmCard)

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid a circular import at module load time.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard import IxVmCard
        return IxVmCard(self)

    @property
    def EnableLicenseCheck(self):
        """bool: whether a license check is performed on port connect."""
        value = self._get_attribute('enableLicenseCheck')
        return value

    @EnableLicenseCheck.setter
    def EnableLicenseCheck(self, value):
        self._set_attribute('enableLicenseCheck', value)

    @property
    def Hostname(self):
        """str: the Virtual Chassis hostname or IP (read-only)."""
        value = self._get_attribute('hostname')
        return value

    @property
    def LicenseServer(self):
        """str: the address of the license server."""
        value = self._get_attribute('licenseServer')
        return value

    @LicenseServer.setter
    def LicenseServer(self, value):
        self._set_attribute('licenseServer', value)

    @property
    def NtpServer(self):
        """str: the address of the NTP server."""
        value = self._get_attribute('ntpServer')
        return value

    @NtpServer.setter
    def NtpServer(self, value):
        self._set_attribute('ntpServer', value)

    @property
    def StartTxDelay(self):
        """str: the delay amount for transmit."""
        value = self._get_attribute('startTxDelay')
        return value

    @StartTxDelay.setter
    def StartTxDelay(self, value):
        self._set_attribute('startTxDelay', value)

    def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer=None, StartTxDelay=None):
        """Updates a child instance of virtualChassis on the server.

        Args:
            EnableLicenseCheck (bool): Enables license check on port connect
            LicenseServer (str): The address of the license server
            NtpServer (str): The address of the NTP server
            StartTxDelay (str): The delay amount for transmit

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # Forward the full argument mapping (self included) to the base-class
        # update helper; keep no extra locals so locals() stays exactly the
        # keyword arguments above.
        self._update(locals())
|
[
"# MIT LICENSE\n#\n# Copyright 1997 - 2019 by IXIA Keysight\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"),\n# to deal in the Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish, distribute, sublicense,\n# and/or sell copies of the Software, and to permit persons to whom the\n# Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE. 
\nfrom ixnetwork_restpy.base import Base\nfrom ixnetwork_restpy.files import Files\n\n\nclass VirtualChassis(Base):\n \"\"\"Virtual Chassis is used to get and to manage a Virtual Chassis topology and get the list of discovered appliances\n The VirtualChassis class encapsulates a required virtualChassis resource which will be retrieved from the server every time the property is accessed.\n \"\"\"\n\n __slots__ = ()\n _SDM_NAME = 'virtualChassis'\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n\n @property\n def DiscoveredAppliance(self):\n \"\"\"An instance of the DiscoveredAppliance class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance\n return DiscoveredAppliance(self)\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n\n @property\n def IxVmCard(self):\n \"\"\"An instance of the IxVmCard class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard.IxVmCard)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server 
has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard import IxVmCard\n return IxVmCard(self)\n\n @property\n def EnableLicenseCheck(self):\n \"\"\"Enables license check on port connect\n\n Returns:\n bool\n \"\"\"\n return self._get_attribute('enableLicenseCheck')\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer=None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"from ixnetwork_restpy.base import Base\nfrom ixnetwork_restpy.files import Files\n\n\nclass VirtualChassis(Base):\n \"\"\"Virtual Chassis is used to get and to manage a Virtual Chassis topology and get the list of discovered appliances\n The VirtualChassis class encapsulates a required virtualChassis resource which will be retrieved from the server every time the property is accessed.\n \"\"\"\n __slots__ = ()\n _SDM_NAME = 'virtualChassis'\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n\n @property\n def DiscoveredAppliance(self):\n \"\"\"An instance of the DiscoveredAppliance class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance\n return DiscoveredAppliance(self)\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n\n @property\n def IxVmCard(self):\n \"\"\"An instance of the IxVmCard class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard.IxVmCard)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server 
has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard import IxVmCard\n return IxVmCard(self)\n\n @property\n def EnableLicenseCheck(self):\n \"\"\"Enables license check on port connect\n\n Returns:\n bool\n \"\"\"\n return self._get_attribute('enableLicenseCheck')\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n \"\"\"Virtual Chassis is used to get and to manage a Virtual Chassis topology and get the list of discovered appliances\n The VirtualChassis class encapsulates a required virtualChassis resource which will be retrieved from the server every time the property is accessed.\n \"\"\"\n __slots__ = ()\n _SDM_NAME = 'virtualChassis'\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n\n @property\n def DiscoveredAppliance(self):\n \"\"\"An instance of the DiscoveredAppliance class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance\n return DiscoveredAppliance(self)\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n\n @property\n def IxVmCard(self):\n \"\"\"An instance of the IxVmCard class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard.IxVmCard)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from 
ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard import IxVmCard\n return IxVmCard(self)\n\n @property\n def EnableLicenseCheck(self):\n \"\"\"Enables license check on port connect\n\n Returns:\n bool\n \"\"\"\n return self._get_attribute('enableLicenseCheck')\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n __slots__ = ()\n _SDM_NAME = 'virtualChassis'\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n\n @property\n def DiscoveredAppliance(self):\n \"\"\"An instance of the DiscoveredAppliance class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance\n return DiscoveredAppliance(self)\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n\n @property\n def IxVmCard(self):\n \"\"\"An instance of the IxVmCard class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard.IxVmCard)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard import IxVmCard\n return IxVmCard(self)\n\n @property\n def EnableLicenseCheck(self):\n \"\"\"Enables license check on port connect\n\n Returns:\n bool\n \"\"\"\n 
return self._get_attribute('enableLicenseCheck')\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n\n @property\n def DiscoveredAppliance(self):\n \"\"\"An instance of the DiscoveredAppliance class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance\n return DiscoveredAppliance(self)\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n\n @property\n def IxVmCard(self):\n \"\"\"An instance of the IxVmCard class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard.IxVmCard)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.ixvmcard.ixvmcard import IxVmCard\n return IxVmCard(self)\n\n @property\n def EnableLicenseCheck(self):\n \"\"\"Enables license check on port connect\n\n Returns:\n bool\n \"\"\"\n return 
self._get_attribute('enableLicenseCheck')\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n\n @property\n def DiscoveredAppliance(self):\n \"\"\"An instance of the DiscoveredAppliance class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance\n return DiscoveredAppliance(self)\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n\n @property\n def EnableLicenseCheck(self):\n \"\"\"Enables license check on port connect\n\n Returns:\n bool\n \"\"\"\n return self._get_attribute('enableLicenseCheck')\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def 
LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n\n @property\n def DiscoveredAppliance(self):\n \"\"\"An instance of the DiscoveredAppliance class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance.DiscoveredAppliance)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.discoveredappliance.discoveredappliance import DiscoveredAppliance\n return DiscoveredAppliance(self)\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n 
Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n\n @property\n def StartTxDelay(self):\n \"\"\"The delay amount for transmit\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('startTxDelay')\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n 
EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n\n def __init__(self, parent):\n super(VirtualChassis, self).__init__(parent)\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n <function token>\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): 
The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n\n @property\n def Hostname(self):\n \"\"\"Virtual Chassis hostname or IP\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('hostname')\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n <function token>\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay 
amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n <function token>\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n <function token>\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n\n def update(self, EnableLicenseCheck=None, LicenseServer=None, NtpServer\n =None, StartTxDelay=None):\n \"\"\"Updates a child instance of virtualChassis on the server.\n\n Args:\n EnableLicenseCheck (bool): Enables license check on port connect\n LicenseServer (str): The address of the license server\n NtpServer (str): The address of the NTP server\n StartTxDelay (str): The delay amount for transmit\n\n Raises:\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n 
self._update(locals())\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n <function token>\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n\n @NtpServer.setter\n def NtpServer(self, value):\n self._set_attribute('ntpServer', value)\n <function token>\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n <function token>\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n <function token>\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n <function token>\n <function token>\n\n @StartTxDelay.setter\n def StartTxDelay(self, value):\n self._set_attribute('startTxDelay', value)\n <function token>\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n <function token>\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n\n @property\n def NtpServer(self):\n \"\"\"The address of the NTP server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('ntpServer')\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n @property\n def Hypervisor(self):\n \"\"\"An instance of the Hypervisor class.\n\n Returns:\n obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor.Hypervisor)\n\n Raises:\n NotFoundError: The requested resource does not exist on the server\n ServerError: The server has encountered an uncategorized error condition\n \"\"\"\n from ixnetwork_restpy.testplatform.sessions.ixnetwork.availablehardware.virtualchassis.hypervisor.hypervisor import Hypervisor\n return Hypervisor(self)\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n <function token>\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n <function token>\n\n @property\n def LicenseServer(self):\n \"\"\"The address of the license server\n\n Returns:\n str\n \"\"\"\n return self._get_attribute('licenseServer')\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n @EnableLicenseCheck.setter\n def EnableLicenseCheck(self, value):\n self._set_attribute('enableLicenseCheck', value)\n <function token>\n <function token>\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n @LicenseServer.setter\n def LicenseServer(self, value):\n self._set_attribute('licenseServer', value)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass VirtualChassis(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
9,709 |
0a42c54ef1412b7f3b8e95da1d65ee05dfa14089
|
from dataframe import *
from chaid import SuperCHAID, SuperCHAIDVisualizer
# Demo script: fit a SuperCHAID tree on the shared dataframe, render it,
# and run one prediction with a deliberately missing feature to exercise
# imputation.  All feature names (manufacturing_region, gm, df, ...) come
# from the star import of `dataframe` above -- TODO confirm their types.
supernode_features = [manufacturing_region]
features_list = [customer_region, product_family, make_vs_buy]
dependant_variable = gm
# Build and fit the segmented CHAID model.
super_tree = SuperCHAID(supernode_features, features_list, dependant_variable)
super_tree.fit(df)
# Export a visual rendering of the fitted tree to "tree".
visualizer = SuperCHAIDVisualizer(super_tree)
visualizer.export("tree")
# Blank out one feature of the first row so predict() has to impute it.
input_row = df.loc[0]
input_row[make_vs_buy] = np.nan
print(input_row[supernode_features + features_list])
print()
# predict() returns None when no segment matches; otherwise a 3-tuple.
result = super_tree.predict(input_row, impute=True)
if result is not None:
    segment, segment_pairs, imputed_pairs = result
    print("Imputed pairs:", imputed_pairs)
    print("Supernode pairs:", segment.supernode_pairs)
    print("Segment pairs:", segment_pairs)
    print(segment)
|
[
"from dataframe import *\nfrom chaid import SuperCHAID, SuperCHAIDVisualizer\n\nsupernode_features = [manufacturing_region]\nfeatures_list = [customer_region, product_family, make_vs_buy]\ndependant_variable = gm\n\nsuper_tree = SuperCHAID(supernode_features, features_list, dependant_variable)\nsuper_tree.fit(df)\n\nvisualizer = SuperCHAIDVisualizer(super_tree)\nvisualizer.export(\"tree\")\n\ninput_row = df.loc[0]\ninput_row[make_vs_buy] = np.nan\nprint(input_row[supernode_features + features_list])\nprint()\n\nresult = super_tree.predict(input_row, impute=True)\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print(\"Imputed pairs:\", imputed_pairs)\n print(\"Supernode pairs:\", segment.supernode_pairs)\n print(\"Segment pairs:\", segment_pairs)\n print(segment)\n",
"from dataframe import *\nfrom chaid import SuperCHAID, SuperCHAIDVisualizer\nsupernode_features = [manufacturing_region]\nfeatures_list = [customer_region, product_family, make_vs_buy]\ndependant_variable = gm\nsuper_tree = SuperCHAID(supernode_features, features_list, dependant_variable)\nsuper_tree.fit(df)\nvisualizer = SuperCHAIDVisualizer(super_tree)\nvisualizer.export('tree')\ninput_row = df.loc[0]\ninput_row[make_vs_buy] = np.nan\nprint(input_row[supernode_features + features_list])\nprint()\nresult = super_tree.predict(input_row, impute=True)\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print('Imputed pairs:', imputed_pairs)\n print('Supernode pairs:', segment.supernode_pairs)\n print('Segment pairs:', segment_pairs)\n print(segment)\n",
"<import token>\nsupernode_features = [manufacturing_region]\nfeatures_list = [customer_region, product_family, make_vs_buy]\ndependant_variable = gm\nsuper_tree = SuperCHAID(supernode_features, features_list, dependant_variable)\nsuper_tree.fit(df)\nvisualizer = SuperCHAIDVisualizer(super_tree)\nvisualizer.export('tree')\ninput_row = df.loc[0]\ninput_row[make_vs_buy] = np.nan\nprint(input_row[supernode_features + features_list])\nprint()\nresult = super_tree.predict(input_row, impute=True)\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print('Imputed pairs:', imputed_pairs)\n print('Supernode pairs:', segment.supernode_pairs)\n print('Segment pairs:', segment_pairs)\n print(segment)\n",
"<import token>\n<assignment token>\nsuper_tree.fit(df)\n<assignment token>\nvisualizer.export('tree')\n<assignment token>\nprint(input_row[supernode_features + features_list])\nprint()\n<assignment token>\nif result is not None:\n segment, segment_pairs, imputed_pairs = result\n print('Imputed pairs:', imputed_pairs)\n print('Supernode pairs:', segment.supernode_pairs)\n print('Segment pairs:', segment_pairs)\n print(segment)\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,710 |
239f055fd76a3ecb5f384c256ad850ea42739b8f
|
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
import math
from tkinter import *
from tkinter.ttk import *
from facedetectandtrack import *
# Rolling window of row indices plotted each frame (trimmed in animate()).
x_vals = []
# Tk root is used only to query the screen dimensions inside animate().
root = Tk()
# Frame counter shared with animate() via `global`.
counter=0
#def graph():
plt.style.use('seaborn')
def animate(i):
    """FuncAnimation callback: scatter the face's distance from screen center.

    Each tick re-reads data.csv, appends the current frame counter to the
    rolling window, and plots one point: the Euclidean distance between the
    screen center and the tracked face position (face_x2 / face_y2 come from
    the `facedetectandtrack` star import -- presumably pixel coordinates,
    TODO confirm).  Terminates the program once the CSV runs out of rows.
    """
    data = pd.read_csv('data.csv')
    global x_vals
    global counter
    x_vals.append(counter)
    try:
        # NOTE(review): x and y are never plotted (plt.plot is commented
        # out), but these iloc lookups are what raise IndexError once
        # `counter` runs past the CSV rows -- they double as the loop's
        # termination condition.  Do not remove.
        x = data.iloc[x_vals,0]
        y = data.iloc[x_vals,1]
        if counter>10:
            # Keep only the most recent ~10 indices in the window.
            x_vals.pop(0)
        plt.cla()
        axes=plt.gca()
        axes.set_ylim([0,30])
        #plt.plot(x, y)
        counter=counter+1
        height = root.winfo_screenheight()
        width = root.winfo_screenwidth()
        # Screen center coordinates.
        screen_x1 = width/2
        screen_y1 = height/2
        # Euclidean distance from screen center to the tracked face point.
        X = screen_x1 - face_x2
        Y = screen_y1 - face_y2
        d_x = (X*X)
        d_y = (Y*Y)
        D = d_x + d_y
        distance = math.sqrt(D)
        #print(distance)
        plt.scatter(counter ,distance, s= 50,linewidth=1)
        plt.xlabel("Time")
        plt.ylabel("Movement of student from the center of screen")
        plt.tight_layout()
    except IndexError as e:
        # Data exhausted: end the whole process, not just the animation.
        print('Graph ended')
        exit(0)
# Keep a reference to the animation object -- FuncAnimation is garbage
# collected (and stops) if the return value is discarded.
ani = FuncAnimation(plt.gcf(), animate, interval=1000)
# NOTE(review): savefig runs before plt.show(), so the PNG captures the
# figure before any animation frame has been drawn -- confirm intent.
plt.savefig("Scatter_Graph.png")
plt.tight_layout()
plt.show()
|
[
"\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nimport math\nfrom tkinter import * \nfrom tkinter.ttk import *\nfrom facedetectandtrack import *\n \nx_vals = []\nroot = Tk()\n\n\ncounter=0\n#def graph():\nplt.style.use('seaborn')\n\ndef animate(i):\n data = pd.read_csv('data.csv')\n global x_vals\n global counter\n x_vals.append(counter)\n try:\n x = data.iloc[x_vals,0]\n y = data.iloc[x_vals,1] \n if counter>10:\n x_vals.pop(0)\n\n plt.cla()\n axes=plt.gca()\n axes.set_ylim([0,30])\n #plt.plot(x, y)\n counter=counter+1\n\n height = root.winfo_screenheight() \n width = root.winfo_screenwidth() \n screen_x1 = width/2\n screen_y1 = height/2\n X = screen_x1 - face_x2\n Y = screen_y1 - face_y2\n d_x = (X*X)\n d_y = (Y*Y)\n D = d_x + d_y\n distance = math.sqrt(D)\n #print(distance)\n plt.scatter(counter ,distance, s= 50,linewidth=1)\n\n plt.xlabel(\"Time\")\n plt.ylabel(\"Movement of student from the center of screen\")\n\n\n plt.tight_layout()\n except IndexError as e:\n print('Graph ended')\n exit(0)\n\nani = FuncAnimation(plt.gcf(), animate, interval=1000)\nplt.savefig(\"Scatter_Graph.png\")\n\nplt.tight_layout()\nplt.show()",
"import pandas as pd\nimport matplotlib.pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nimport math\nfrom tkinter import *\nfrom tkinter.ttk import *\nfrom facedetectandtrack import *\nx_vals = []\nroot = Tk()\ncounter = 0\nplt.style.use('seaborn')\n\n\ndef animate(i):\n data = pd.read_csv('data.csv')\n global x_vals\n global counter\n x_vals.append(counter)\n try:\n x = data.iloc[x_vals, 0]\n y = data.iloc[x_vals, 1]\n if counter > 10:\n x_vals.pop(0)\n plt.cla()\n axes = plt.gca()\n axes.set_ylim([0, 30])\n counter = counter + 1\n height = root.winfo_screenheight()\n width = root.winfo_screenwidth()\n screen_x1 = width / 2\n screen_y1 = height / 2\n X = screen_x1 - face_x2\n Y = screen_y1 - face_y2\n d_x = X * X\n d_y = Y * Y\n D = d_x + d_y\n distance = math.sqrt(D)\n plt.scatter(counter, distance, s=50, linewidth=1)\n plt.xlabel('Time')\n plt.ylabel('Movement of student from the center of screen')\n plt.tight_layout()\n except IndexError as e:\n print('Graph ended')\n exit(0)\n\n\nani = FuncAnimation(plt.gcf(), animate, interval=1000)\nplt.savefig('Scatter_Graph.png')\nplt.tight_layout()\nplt.show()\n",
"<import token>\nx_vals = []\nroot = Tk()\ncounter = 0\nplt.style.use('seaborn')\n\n\ndef animate(i):\n data = pd.read_csv('data.csv')\n global x_vals\n global counter\n x_vals.append(counter)\n try:\n x = data.iloc[x_vals, 0]\n y = data.iloc[x_vals, 1]\n if counter > 10:\n x_vals.pop(0)\n plt.cla()\n axes = plt.gca()\n axes.set_ylim([0, 30])\n counter = counter + 1\n height = root.winfo_screenheight()\n width = root.winfo_screenwidth()\n screen_x1 = width / 2\n screen_y1 = height / 2\n X = screen_x1 - face_x2\n Y = screen_y1 - face_y2\n d_x = X * X\n d_y = Y * Y\n D = d_x + d_y\n distance = math.sqrt(D)\n plt.scatter(counter, distance, s=50, linewidth=1)\n plt.xlabel('Time')\n plt.ylabel('Movement of student from the center of screen')\n plt.tight_layout()\n except IndexError as e:\n print('Graph ended')\n exit(0)\n\n\nani = FuncAnimation(plt.gcf(), animate, interval=1000)\nplt.savefig('Scatter_Graph.png')\nplt.tight_layout()\nplt.show()\n",
"<import token>\n<assignment token>\nplt.style.use('seaborn')\n\n\ndef animate(i):\n data = pd.read_csv('data.csv')\n global x_vals\n global counter\n x_vals.append(counter)\n try:\n x = data.iloc[x_vals, 0]\n y = data.iloc[x_vals, 1]\n if counter > 10:\n x_vals.pop(0)\n plt.cla()\n axes = plt.gca()\n axes.set_ylim([0, 30])\n counter = counter + 1\n height = root.winfo_screenheight()\n width = root.winfo_screenwidth()\n screen_x1 = width / 2\n screen_y1 = height / 2\n X = screen_x1 - face_x2\n Y = screen_y1 - face_y2\n d_x = X * X\n d_y = Y * Y\n D = d_x + d_y\n distance = math.sqrt(D)\n plt.scatter(counter, distance, s=50, linewidth=1)\n plt.xlabel('Time')\n plt.ylabel('Movement of student from the center of screen')\n plt.tight_layout()\n except IndexError as e:\n print('Graph ended')\n exit(0)\n\n\n<assignment token>\nplt.savefig('Scatter_Graph.png')\nplt.tight_layout()\nplt.show()\n",
"<import token>\n<assignment token>\n<code token>\n\n\ndef animate(i):\n data = pd.read_csv('data.csv')\n global x_vals\n global counter\n x_vals.append(counter)\n try:\n x = data.iloc[x_vals, 0]\n y = data.iloc[x_vals, 1]\n if counter > 10:\n x_vals.pop(0)\n plt.cla()\n axes = plt.gca()\n axes.set_ylim([0, 30])\n counter = counter + 1\n height = root.winfo_screenheight()\n width = root.winfo_screenwidth()\n screen_x1 = width / 2\n screen_y1 = height / 2\n X = screen_x1 - face_x2\n Y = screen_y1 - face_y2\n d_x = X * X\n d_y = Y * Y\n D = d_x + d_y\n distance = math.sqrt(D)\n plt.scatter(counter, distance, s=50, linewidth=1)\n plt.xlabel('Time')\n plt.ylabel('Movement of student from the center of screen')\n plt.tight_layout()\n except IndexError as e:\n print('Graph ended')\n exit(0)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<code token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
9,711 |
35647ed5e2c128a5bf819a1e47ead7e958172b1c
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 24 22:05:12 2019
@author: admin
"""
# Print every row index of test_set, one per line.
# NOTE(review): `test_set` is not defined in this file -- presumably
# injected by the executing environment (e.g. an IDE/Spyder session).
for index in range(test_set.shape[0]):
    print(index)
|
[
"#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Apr 24 22:05:12 2019\n\n@author: admin\n\"\"\"\n\nfor index in range(test_set.shape[0]):\n print(index)",
"<docstring token>\nfor index in range(test_set.shape[0]):\n print(index)\n",
"<docstring token>\n<code token>\n"
] | false |
9,712 |
6907a1e08d728732eebf81fec7c0dab8729448e2
|
# Generated by Django 2.1.5 on 2019-01-20 18:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the `tours` app.

    Creates Destination, Gallery, Tour and TourDetail, then wires the
    Gallery -> Tour foreign key.  Generated files like this should stay
    byte-stable; field-name typos below ('feautured', 'descreption') are
    frozen into the schema and must only be fixed via a follow-up
    RenameField migration, never by editing this file.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Destination',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('image', models.ImageField(upload_to='img/destinations')),
            ],
        ),
        migrations.CreateModel(
            name='Gallery',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50)),
                ('image', models.ImageField(upload_to='img/tours')),
            ],
        ),
        migrations.CreateModel(
            name='Tour',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='title for admin')),
                ('status', models.BooleanField(default=False)),
                ('price', models.IntegerField()),
                ('stars', models.IntegerField(choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)])),
                ('feautured', models.BooleanField(default=True)),  # NOTE(review): typo, schema-frozen
                ('destination', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tours.Destination')),
            ],
        ),
        migrations.CreateModel(
            name='TourDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50)),
                ('descreption', models.TextField()),  # NOTE(review): typo, schema-frozen
                ('tour', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tours.Tour')),
            ],
        ),
        # Added separately because Gallery is created before Tour exists.
        migrations.AddField(
            model_name='gallery',
            name='tour',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tours.Tour'),
        ),
    ]
|
[
"# Generated by Django 2.1.5 on 2019-01-20 18:11\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Destination',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=50)),\n ('image', models.ImageField(upload_to='img/destinations')),\n ],\n ),\n migrations.CreateModel(\n name='Gallery',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('title', models.CharField(max_length=50)),\n ('image', models.ImageField(upload_to='img/tours')),\n ],\n ),\n migrations.CreateModel(\n name='Tour',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('title', models.CharField(max_length=50, verbose_name='title for admin')),\n ('status', models.BooleanField(default=False)),\n ('price', models.IntegerField()),\n ('stars', models.IntegerField(choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)])),\n ('feautured', models.BooleanField(default=True)),\n ('destination', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tours.Destination')),\n ],\n ),\n migrations.CreateModel(\n name='TourDetail',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('title', models.CharField(max_length=50)),\n ('descreption', models.TextField()),\n ('tour', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tours.Tour')),\n ],\n ),\n migrations.AddField(\n model_name='gallery',\n name='tour',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tours.Tour'),\n ),\n ]\n",
"from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Destination', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=50\n )), ('image', models.ImageField(upload_to='img/destinations'))]),\n migrations.CreateModel(name='Gallery', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('title', models.CharField(max_length=50)), (\n 'image', models.ImageField(upload_to='img/tours'))]), migrations.\n CreateModel(name='Tour', fields=[('id', models.AutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('title', models.CharField(max_length=50, verbose_name=\n 'title for admin')), ('status', models.BooleanField(default=False)),\n ('price', models.IntegerField()), ('stars', models.IntegerField(\n choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)])), ('feautured',\n models.BooleanField(default=True)), ('destination', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'tours.Destination'))]), migrations.CreateModel(name='TourDetail',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('title', models.CharField(\n max_length=50)), ('descreption', models.TextField()), ('tour',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'tours.Tour'))]), migrations.AddField(model_name='gallery', name=\n 'tour', field=models.ForeignKey(on_delete=django.db.models.deletion\n .CASCADE, to='tours.Tour'))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Destination', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=50\n )), ('image', models.ImageField(upload_to='img/destinations'))]),\n migrations.CreateModel(name='Gallery', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('title', models.CharField(max_length=50)), (\n 'image', models.ImageField(upload_to='img/tours'))]), migrations.\n CreateModel(name='Tour', fields=[('id', models.AutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('title', models.CharField(max_length=50, verbose_name=\n 'title for admin')), ('status', models.BooleanField(default=False)),\n ('price', models.IntegerField()), ('stars', models.IntegerField(\n choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)])), ('feautured',\n models.BooleanField(default=True)), ('destination', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'tours.Destination'))]), migrations.CreateModel(name='TourDetail',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('title', models.CharField(\n max_length=50)), ('descreption', models.TextField()), ('tour',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'tours.Tour'))]), migrations.AddField(model_name='gallery', name=\n 'tour', field=models.ForeignKey(on_delete=django.db.models.deletion\n .CASCADE, to='tours.Tour'))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
9,713 |
e3417980599448f1293b56cb95312088e7a8abe3
|
import os
import imageio
import h5py
import numpy as np
def create_segmentation_test_data(data_path, raw_key, label_key, shape, chunks):
    """Write a random raw/label dataset pair into an HDF5 file.

    The raw dataset holds uniform floats in [0, 1); the label dataset holds
    integer class ids in {0, 1, 2, 3}.  The file is opened in append mode,
    so existing datasets under other keys are preserved.
    """
    raw = np.random.rand(*shape)
    labels = np.random.randint(0, 4, size=shape)
    with h5py.File(data_path, 'a') as out:
        out.create_dataset(raw_key, data=raw, chunks=chunks)
        out.create_dataset(label_key, data=labels, chunks=chunks)
def create_image_collection_test_data(folder, n_images, min_shape, max_shape):
    """Generate *n_images* random image/label TIFF pairs under *folder*.

    Images go to <folder>/images and labels to <folder>/labels, both named
    im_<i>.tif.  Each image gets an independent random shape drawn per axis
    from [min_shape[k], max_shape[k]); labels are class ids in {0, 1, 2, 3}.
    """
    image_dir = os.path.join(folder, 'images')
    label_dir = os.path.join(folder, 'labels')
    for directory in (image_dir, label_dir):
        os.makedirs(directory, exist_ok=True)
    for i in range(n_images):
        # Random per-axis shape within the configured bounds.
        shape = tuple(np.random.randint(lo, hi)
                      for lo, hi in zip(min_shape, max_shape))
        # NOTE(review): rand() yields floats in [0, 1), so astype('int16')
        # truncates every pixel to 0 -- confirm this is intended test data.
        raw = np.random.rand(*shape).astype('int16')
        label = np.random.randint(0, 4, size=shape)
        name = f"im_{i}.tif"
        imageio.imwrite(os.path.join(image_dir, name), raw)
        imageio.imwrite(os.path.join(label_dir, name), label)
|
[
"import os\nimport imageio\nimport h5py\nimport numpy as np\n\n\ndef create_segmentation_test_data(data_path, raw_key, label_key, shape, chunks):\n with h5py.File(data_path, 'a') as f:\n f.create_dataset(raw_key, data=np.random.rand(*shape), chunks=chunks)\n f.create_dataset(label_key, data=np.random.randint(0, 4, size=shape), chunks=chunks)\n\n\ndef create_image_collection_test_data(folder, n_images, min_shape, max_shape):\n im_folder = os.path.join(folder, 'images')\n label_folder = os.path.join(folder, 'labels')\n os.makedirs(im_folder, exist_ok=True)\n os.makedirs(label_folder, exist_ok=True)\n\n for i in range(n_images):\n shape = tuple(np.random.randint(mins, maxs) for mins, maxs in zip(min_shape, max_shape))\n raw = np.random.rand(*shape).astype('int16')\n label = np.random.randint(0, 4, size=shape)\n imageio.imwrite(os.path.join(im_folder, f\"im_{i}.tif\"), raw)\n imageio.imwrite(os.path.join(label_folder, f\"im_{i}.tif\"), label)\n",
"import os\nimport imageio\nimport h5py\nimport numpy as np\n\n\ndef create_segmentation_test_data(data_path, raw_key, label_key, shape, chunks\n ):\n with h5py.File(data_path, 'a') as f:\n f.create_dataset(raw_key, data=np.random.rand(*shape), chunks=chunks)\n f.create_dataset(label_key, data=np.random.randint(0, 4, size=shape\n ), chunks=chunks)\n\n\ndef create_image_collection_test_data(folder, n_images, min_shape, max_shape):\n im_folder = os.path.join(folder, 'images')\n label_folder = os.path.join(folder, 'labels')\n os.makedirs(im_folder, exist_ok=True)\n os.makedirs(label_folder, exist_ok=True)\n for i in range(n_images):\n shape = tuple(np.random.randint(mins, maxs) for mins, maxs in zip(\n min_shape, max_shape))\n raw = np.random.rand(*shape).astype('int16')\n label = np.random.randint(0, 4, size=shape)\n imageio.imwrite(os.path.join(im_folder, f'im_{i}.tif'), raw)\n imageio.imwrite(os.path.join(label_folder, f'im_{i}.tif'), label)\n",
"<import token>\n\n\ndef create_segmentation_test_data(data_path, raw_key, label_key, shape, chunks\n ):\n with h5py.File(data_path, 'a') as f:\n f.create_dataset(raw_key, data=np.random.rand(*shape), chunks=chunks)\n f.create_dataset(label_key, data=np.random.randint(0, 4, size=shape\n ), chunks=chunks)\n\n\ndef create_image_collection_test_data(folder, n_images, min_shape, max_shape):\n im_folder = os.path.join(folder, 'images')\n label_folder = os.path.join(folder, 'labels')\n os.makedirs(im_folder, exist_ok=True)\n os.makedirs(label_folder, exist_ok=True)\n for i in range(n_images):\n shape = tuple(np.random.randint(mins, maxs) for mins, maxs in zip(\n min_shape, max_shape))\n raw = np.random.rand(*shape).astype('int16')\n label = np.random.randint(0, 4, size=shape)\n imageio.imwrite(os.path.join(im_folder, f'im_{i}.tif'), raw)\n imageio.imwrite(os.path.join(label_folder, f'im_{i}.tif'), label)\n",
"<import token>\n\n\ndef create_segmentation_test_data(data_path, raw_key, label_key, shape, chunks\n ):\n with h5py.File(data_path, 'a') as f:\n f.create_dataset(raw_key, data=np.random.rand(*shape), chunks=chunks)\n f.create_dataset(label_key, data=np.random.randint(0, 4, size=shape\n ), chunks=chunks)\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n"
] | false |
9,714 |
b7a7941b3555b30ac7e743a5457df76f9eb7cb15
|
#write a program that displays the wor "Hello!"
print("Hello!")
|
[
"#write a program that displays the wor \"Hello!\"\n\nprint(\"Hello!\")\n",
"print('Hello!')\n",
"<code token>\n"
] | false |
9,715 |
c9be3d25824093528e2bee51c045d05e036daa67
|
import sklearn.metrics as metrics
import sklearn.cross_validation as cv
from sklearn.externals import joblib
import MachineLearning.Reinforcement.InternalSQLManager as sqlManager
class ReinforcementLearner:
def __init__(self, clf=None, load=False, clfName=None):
"""
Initialise the Classifier, either from the provided model or from the stored classifier
:param clf: The current classifier, not yet fitted to the data
:param load: Set to True in order to load a previously saved model
"""
if load:
self.clf = joblib.load("model.pkl")
self.reTrain = True
else:
self.clf = clf
self.reTrain = False
if clfName == None:
self.name = self.clf.__class__.__name__
else:
self.name = clfName
def fit(self, X, y, scoring="accuracy", crossval=5):
"""
Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.
:param X: Input Features
:param y: Class Labels
:param scoring: Scoring used for cross validation
:param crossval: Cross Validation number of folds
:return: True if a new model is fit to the data, or a previous model is updated
False if old model when fit to new data performs poorly in comparison to
earlier data
"""
if not self.reTrain: # Train first time
score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)
sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1) # Store the first result of clf
self.clf.fit(X, y)
joblib.dump(self.clf, "model.pkl") # Store the CLF
print("Data Fit")
return True
else:
previousData = sqlManager.selectNewestRecord(self.name) # Check the last entry of CLF
if len(previousData) > 0:
oldSize = previousData[5]
newSize = len(y)
accScore = previousData[3]
score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)
newAccScore = score.mean()
print("Old Accuracy Score : ", accScore)
print("New Accuracy Score : ", newAccScore)
if accScore <= newAccScore: # If new data is benefitial, increases accuracy
print("Reinforcement Learning : Newer model is superior. Saving Model.")
self.clf.fit(X, y)
sqlManager.insertValue(self.name, accScore, newAccScore, oldSize, newSize, 1)
joblib.dump(self.clf, "model.pkl")
return True
else:
print("Reinforcement Learning : Newer model is inferior. Not saving model.")
return False
def predict(self, X):
return self.clf.predict(X)
def __exit__(self, exc_type, exc_val, exc_tb):
sqlManager.close()
if __name__ == "__main__":
pass
|
[
"import sklearn.metrics as metrics\nimport sklearn.cross_validation as cv\nfrom sklearn.externals import joblib\nimport MachineLearning.Reinforcement.InternalSQLManager as sqlManager\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n\n if load:\n self.clf = joblib.load(\"model.pkl\")\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n\n def fit(self, X, y, scoring=\"accuracy\", crossval=5):\n \"\"\"\n Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.\n\n :param X: Input Features\n :param y: Class Labels\n :param scoring: Scoring used for cross validation\n :param crossval: Cross Validation number of folds\n :return: True if a new model is fit to the data, or a previous model is updated\n False if old model when fit to new data performs poorly in comparison to\n earlier data\n \"\"\"\n if not self.reTrain: # Train first time\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n\n sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1) # Store the first result of clf\n self.clf.fit(X, y)\n\n joblib.dump(self.clf, \"model.pkl\") # Store the CLF\n print(\"Data Fit\")\n return True\n else:\n previousData = sqlManager.selectNewestRecord(self.name) # Check the last entry of CLF\n if len(previousData) > 0:\n oldSize = previousData[5]\n newSize = len(y)\n\n accScore = previousData[3]\n\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n newAccScore = score.mean()\n print(\"Old Accuracy Score : \", accScore)\n print(\"New Accuracy Score : \", newAccScore)\n\n if accScore <= 
newAccScore: # If new data is benefitial, increases accuracy\n print(\"Reinforcement Learning : Newer model is superior. Saving Model.\")\n self.clf.fit(X, y)\n\n sqlManager.insertValue(self.name, accScore, newAccScore, oldSize, newSize, 1)\n joblib.dump(self.clf, \"model.pkl\")\n return True\n else:\n print(\"Reinforcement Learning : Newer model is inferior. Not saving model.\")\n return False\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\nif __name__ == \"__main__\":\n pass\n\n",
"import sklearn.metrics as metrics\nimport sklearn.cross_validation as cv\nfrom sklearn.externals import joblib\nimport MachineLearning.Reinforcement.InternalSQLManager as sqlManager\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n\n def fit(self, X, y, scoring='accuracy', crossval=5):\n \"\"\"\n Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.\n\n :param X: Input Features\n :param y: Class Labels\n :param scoring: Scoring used for cross validation\n :param crossval: Cross Validation number of folds\n :return: True if a new model is fit to the data, or a previous model is updated\n False if old model when fit to new data performs poorly in comparison to\n earlier data\n \"\"\"\n if not self.reTrain:\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1)\n self.clf.fit(X, y)\n joblib.dump(self.clf, 'model.pkl')\n print('Data Fit')\n return True\n else:\n previousData = sqlManager.selectNewestRecord(self.name)\n if len(previousData) > 0:\n oldSize = previousData[5]\n newSize = len(y)\n accScore = previousData[3]\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval\n )\n newAccScore = score.mean()\n print('Old Accuracy Score : ', accScore)\n print('New Accuracy Score : ', newAccScore)\n if accScore <= newAccScore:\n print(\n 'Reinforcement Learning : Newer model is superior. 
Saving Model.'\n )\n self.clf.fit(X, y)\n sqlManager.insertValue(self.name, accScore, newAccScore,\n oldSize, newSize, 1)\n joblib.dump(self.clf, 'model.pkl')\n return True\n else:\n print(\n 'Reinforcement Learning : Newer model is inferior. Not saving model.'\n )\n return False\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\nif __name__ == '__main__':\n pass\n",
"<import token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n\n def fit(self, X, y, scoring='accuracy', crossval=5):\n \"\"\"\n Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.\n\n :param X: Input Features\n :param y: Class Labels\n :param scoring: Scoring used for cross validation\n :param crossval: Cross Validation number of folds\n :return: True if a new model is fit to the data, or a previous model is updated\n False if old model when fit to new data performs poorly in comparison to\n earlier data\n \"\"\"\n if not self.reTrain:\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1)\n self.clf.fit(X, y)\n joblib.dump(self.clf, 'model.pkl')\n print('Data Fit')\n return True\n else:\n previousData = sqlManager.selectNewestRecord(self.name)\n if len(previousData) > 0:\n oldSize = previousData[5]\n newSize = len(y)\n accScore = previousData[3]\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval\n )\n newAccScore = score.mean()\n print('Old Accuracy Score : ', accScore)\n print('New Accuracy Score : ', newAccScore)\n if accScore <= newAccScore:\n print(\n 'Reinforcement Learning : Newer model is superior. 
Saving Model.'\n )\n self.clf.fit(X, y)\n sqlManager.insertValue(self.name, accScore, newAccScore,\n oldSize, newSize, 1)\n joblib.dump(self.clf, 'model.pkl')\n return True\n else:\n print(\n 'Reinforcement Learning : Newer model is inferior. Not saving model.'\n )\n return False\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\nif __name__ == '__main__':\n pass\n",
"<import token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n\n def fit(self, X, y, scoring='accuracy', crossval=5):\n \"\"\"\n Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.\n\n :param X: Input Features\n :param y: Class Labels\n :param scoring: Scoring used for cross validation\n :param crossval: Cross Validation number of folds\n :return: True if a new model is fit to the data, or a previous model is updated\n False if old model when fit to new data performs poorly in comparison to\n earlier data\n \"\"\"\n if not self.reTrain:\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1)\n self.clf.fit(X, y)\n joblib.dump(self.clf, 'model.pkl')\n print('Data Fit')\n return True\n else:\n previousData = sqlManager.selectNewestRecord(self.name)\n if len(previousData) > 0:\n oldSize = previousData[5]\n newSize = len(y)\n accScore = previousData[3]\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval\n )\n newAccScore = score.mean()\n print('Old Accuracy Score : ', accScore)\n print('New Accuracy Score : ', newAccScore)\n if accScore <= newAccScore:\n print(\n 'Reinforcement Learning : Newer model is superior. 
Saving Model.'\n )\n self.clf.fit(X, y)\n sqlManager.insertValue(self.name, accScore, newAccScore,\n oldSize, newSize, 1)\n joblib.dump(self.clf, 'model.pkl')\n return True\n else:\n print(\n 'Reinforcement Learning : Newer model is inferior. Not saving model.'\n )\n return False\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\n<code token>\n",
"<import token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n <function token>\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\n<code token>\n",
"<import token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n <function token>\n <function token>\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\n<code token>\n",
"<import token>\n\n\nclass ReinforcementLearner:\n <function token>\n <function token>\n <function token>\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\n<code token>\n",
"<import token>\n\n\nclass ReinforcementLearner:\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n",
"<import token>\n<class token>\n<code token>\n"
] | false |
9,716 |
13c55c313c740edce48fc979e8956fdd018e8aab
|
"""This module contains a class supporting composition of AugraphyPipelines"""
class ComposePipelines:
"""The composition of multiple AugraphyPipelines.
Define AugraphyPipelines elsewhere, then use this to compose them.
ComposePipelines objects are callable on images (as numpy.ndarrays).
:param pipelines: A list contains multiple augraphy.base.AugraphyPipeline.
:type pipelines: list or tuple
"""
def __init__(self, pipelines):
self.pipelines = pipelines
def __call__(self, image):
augmented_image = image.copy()
newpipeline = dict()
for i, pipeline in enumerate(self.pipelines):
data_output = pipeline.augment(augmented_image)
augmented_image = data_output["output"]
for key in data_output.keys():
newkey = "pipeline" + str(i) + "-" + key
newpipeline[newkey] = data_output[key]
return newpipeline
|
[
"\"\"\"This module contains a class supporting composition of AugraphyPipelines\"\"\"\n\n\nclass ComposePipelines:\n \"\"\"The composition of multiple AugraphyPipelines.\n Define AugraphyPipelines elsewhere, then use this to compose them.\n ComposePipelines objects are callable on images (as numpy.ndarrays).\n\n :param pipelines: A list contains multiple augraphy.base.AugraphyPipeline.\n :type pipelines: list or tuple\n \"\"\"\n\n def __init__(self, pipelines):\n self.pipelines = pipelines\n\n def __call__(self, image):\n\n augmented_image = image.copy()\n newpipeline = dict()\n\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output[\"output\"]\n\n for key in data_output.keys():\n newkey = \"pipeline\" + str(i) + \"-\" + key\n newpipeline[newkey] = data_output[key]\n\n return newpipeline\n",
"<docstring token>\n\n\nclass ComposePipelines:\n \"\"\"The composition of multiple AugraphyPipelines.\n Define AugraphyPipelines elsewhere, then use this to compose them.\n ComposePipelines objects are callable on images (as numpy.ndarrays).\n\n :param pipelines: A list contains multiple augraphy.base.AugraphyPipeline.\n :type pipelines: list or tuple\n \"\"\"\n\n def __init__(self, pipelines):\n self.pipelines = pipelines\n\n def __call__(self, image):\n augmented_image = image.copy()\n newpipeline = dict()\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output['output']\n for key in data_output.keys():\n newkey = 'pipeline' + str(i) + '-' + key\n newpipeline[newkey] = data_output[key]\n return newpipeline\n",
"<docstring token>\n\n\nclass ComposePipelines:\n <docstring token>\n\n def __init__(self, pipelines):\n self.pipelines = pipelines\n\n def __call__(self, image):\n augmented_image = image.copy()\n newpipeline = dict()\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output['output']\n for key in data_output.keys():\n newkey = 'pipeline' + str(i) + '-' + key\n newpipeline[newkey] = data_output[key]\n return newpipeline\n",
"<docstring token>\n\n\nclass ComposePipelines:\n <docstring token>\n <function token>\n\n def __call__(self, image):\n augmented_image = image.copy()\n newpipeline = dict()\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output['output']\n for key in data_output.keys():\n newkey = 'pipeline' + str(i) + '-' + key\n newpipeline[newkey] = data_output[key]\n return newpipeline\n",
"<docstring token>\n\n\nclass ComposePipelines:\n <docstring token>\n <function token>\n <function token>\n",
"<docstring token>\n<class token>\n"
] | false |
9,717 |
bcdd36b534fd3551de9cb40efc11581f4d95a002
|
import sys
from Node import Node
from PriorityQueue import PriorityQueue
def Print(text):
if text is None or len(text) == 0:
print('invalid text.')
print('--------------------------------------------------------------')
return
text_set = set()
for i in text:
text_set.add(i)
if len(text_set) == 1:
print('invalid text.')
print('--------------------------------------------------------------')
return
print("The size of the data is: {}\n".format(sys.getsizeof(text)))
print("The content of the data is: {}\n".format(text))
encoded_data, tree = huffman_encoding(text)
print("The size of the encoded data is: {}\n".format(sys.getsizeof(int(encoded_data, base=2))))
print("The content of the encoded data is: {}\n".format(encoded_data))
decoded_data = huffman_decoding(encoded_data, tree)
print("The size of the decoded data is: {}\n".format(sys.getsizeof(decoded_data)))
print("The content of the encoded data is: {}\n".format(decoded_data))
print('--------------------------------------------------------------')
# this method will print huffman tree
def inorder(root):
if root is not None:
inorder(root.left)
print('Data: ', root.data, 'Freq: ', root.frequency)
if root.right is not None:
print('Right: ', root.right.data)
if root.left is not None:
print('Left: ', root.left.data)
inorder(root.right)
# end method inorder(root)
def generate_encoded_data(root):
"""
:param root: is a root of huffman tree
:return: dictionary contains all codes for each letter in the text.
"""
return generate_encoded_data2(root, {}, '')
# helper method
def generate_encoded_data2(root, dic, code):
if root is not None:
# go left of the tree if root has a left child.
if root.left is not None:
s = code + '0'
generate_encoded_data2(root.left, dic, s)
# if root is a leaf node then add this letter as a key and the code as a value.
if str(root.data).isalpha() or root.data == ' ':
dic.update({root.data: code})
# go left of the tree if root has a right child.
if root.right is not None:
s = code + '1'
generate_encoded_data2(root.right, dic, s)
return dic
else:
return None
def huffman_encoding(data):
"""
:param data: is the text that will we encode.
:return: encoded text as a binary and a root of huffman tree.
"""
if len(data) == 0 or data is None:
print('Please enter a valid data.')
return '', None
min_heap = PriorityQueue()
count_dic = {}
# count frequency of each letter and add it in count_dic as a value of the letter.
for i in range(len(data)):
if data[i] in count_dic:
count_dic[data[i]] += 1
else:
count_dic[data[i]] = 1
# add all element in count_dic to min_heap.
for i, j in count_dic.items():
new_node = Node(i, j)
min_heap.push(new_node, new_node.frequency)
count: int = 1
# create huffman tree phase 1.
while min_heap.size() >= 2:
item_1 = min_heap.pop()
item_2 = min_heap.pop()
sum_frequency = item_1.frequency + item_2.frequency
node = Node(count, sum_frequency, item_1, item_2)
min_heap.push(node, node.frequency)
count += 1
# the root of huffman tree.
root = min_heap.pop()
# generate the Encoded Data.
codes_ = generate_encoded_data(root)
# create string represent encoded data.
encoded = ''
for char in data:
if codes_.get(char) is not None:
encoded += codes_.get(char)
return encoded, root
def huffman_decoding(data, root):
"""
:param data: is the encoded text as a binary.
:param root: is the root of huffman tree.
:return: the decoded data.
"""
if len(data) == 0:
print('Please enter a valid data.')
return '', None
decoded = ''
i = 0
curr = root
while i < len(data):
"""
If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if
the current bit is 1.
"""
if data[i] == '0':
curr = curr.left
else:
curr = curr.right
# go to the next cell of the encoded data.
i += 1
# if curr is leaf node then this node contain a letter.
if curr.is_leaf():
# add this letter to decoded data.
decoded += curr.data
# return and start from the root to find the next letter.
curr = root
return decoded
# Test case 1 -----------------------------------
a_great_sentence = 'The bird is the word'
Print(a_great_sentence)
# Test case 2 -----------------------------------
t1 = ''
Print(t1) # will print 'invalid text'
# Test case 3 -----------------------------------
t2 = 'AAAAAB'
Print(t2)
# Test case 4 -----------------------------------
t3 = 'AAAAA'
Print(t3) # will print 'invalid text'
|
[
"import sys\r\nfrom Node import Node\r\nfrom PriorityQueue import PriorityQueue\r\n\r\n\r\ndef Print(text):\r\n if text is None or len(text) == 0:\r\n print('invalid text.')\r\n print('--------------------------------------------------------------')\r\n return\r\n\r\n text_set = set()\r\n for i in text:\r\n text_set.add(i)\r\n\r\n if len(text_set) == 1:\r\n print('invalid text.')\r\n print('--------------------------------------------------------------')\r\n return\r\n\r\n print(\"The size of the data is: {}\\n\".format(sys.getsizeof(text)))\r\n print(\"The content of the data is: {}\\n\".format(text))\r\n\r\n encoded_data, tree = huffman_encoding(text)\r\n\r\n print(\"The size of the encoded data is: {}\\n\".format(sys.getsizeof(int(encoded_data, base=2))))\r\n print(\"The content of the encoded data is: {}\\n\".format(encoded_data))\r\n\r\n decoded_data = huffman_decoding(encoded_data, tree)\r\n\r\n print(\"The size of the decoded data is: {}\\n\".format(sys.getsizeof(decoded_data)))\r\n print(\"The content of the encoded data is: {}\\n\".format(decoded_data))\r\n print('--------------------------------------------------------------')\r\n\r\n\r\n# this method will print huffman tree\r\ndef inorder(root):\r\n if root is not None:\r\n inorder(root.left)\r\n print('Data: ', root.data, 'Freq: ', root.frequency)\r\n if root.right is not None:\r\n print('Right: ', root.right.data)\r\n if root.left is not None:\r\n print('Left: ', root.left.data)\r\n inorder(root.right)\r\n\r\n# end method inorder(root)\r\n\r\n\r\ndef generate_encoded_data(root):\r\n \"\"\"\r\n :param root: is a root of huffman tree\r\n :return: dictionary contains all codes for each letter in the text.\r\n \"\"\"\r\n return generate_encoded_data2(root, {}, '')\r\n\r\n\r\n# helper method\r\ndef generate_encoded_data2(root, dic, code):\r\n if root is not None:\r\n # go left of the tree if root has a left child.\r\n if root.left is not None:\r\n s = code + '0'\r\n generate_encoded_data2(root.left, dic, 
s)\r\n\r\n # if root is a leaf node then add this letter as a key and the code as a value.\r\n if str(root.data).isalpha() or root.data == ' ':\r\n dic.update({root.data: code})\r\n\r\n # go left of the tree if root has a right child.\r\n if root.right is not None:\r\n s = code + '1'\r\n generate_encoded_data2(root.right, dic, s)\r\n\r\n return dic\r\n else:\r\n return None\r\n\r\n\r\ndef huffman_encoding(data):\r\n \"\"\"\r\n :param data: is the text that will we encode.\r\n :return: encoded text as a binary and a root of huffman tree.\r\n \"\"\"\r\n if len(data) == 0 or data is None:\r\n print('Please enter a valid data.')\r\n return '', None\r\n\r\n min_heap = PriorityQueue()\r\n count_dic = {}\r\n # count frequency of each letter and add it in count_dic as a value of the letter.\r\n for i in range(len(data)):\r\n if data[i] in count_dic:\r\n count_dic[data[i]] += 1\r\n else:\r\n count_dic[data[i]] = 1\r\n\r\n # add all element in count_dic to min_heap.\r\n for i, j in count_dic.items():\r\n new_node = Node(i, j)\r\n min_heap.push(new_node, new_node.frequency)\r\n\r\n count: int = 1\r\n\r\n # create huffman tree phase 1.\r\n while min_heap.size() >= 2:\r\n item_1 = min_heap.pop()\r\n item_2 = min_heap.pop()\r\n sum_frequency = item_1.frequency + item_2.frequency\r\n node = Node(count, sum_frequency, item_1, item_2)\r\n min_heap.push(node, node.frequency)\r\n count += 1\r\n\r\n # the root of huffman tree.\r\n root = min_heap.pop()\r\n # generate the Encoded Data.\r\n codes_ = generate_encoded_data(root)\r\n\r\n # create string represent encoded data.\r\n encoded = ''\r\n for char in data:\r\n if codes_.get(char) is not None:\r\n encoded += codes_.get(char)\r\n\r\n return encoded, root\r\n\r\n\r\ndef huffman_decoding(data, root):\r\n \"\"\"\r\n :param data: is the encoded text as a binary.\r\n :param root: is the root of huffman tree.\r\n :return: the decoded data.\r\n \"\"\"\r\n if len(data) == 0:\r\n print('Please enter a valid data.')\r\n return '', 
None\r\n\r\n decoded = ''\r\n i = 0\r\n curr = root\r\n while i < len(data):\r\n \"\"\"\r\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\r\n the current bit is 1.\r\n \"\"\"\r\n if data[i] == '0':\r\n curr = curr.left\r\n else:\r\n curr = curr.right\r\n # go to the next cell of the encoded data.\r\n i += 1\r\n\r\n # if curr is leaf node then this node contain a letter.\r\n if curr.is_leaf():\r\n # add this letter to decoded data.\r\n decoded += curr.data\r\n # return and start from the root to find the next letter.\r\n curr = root\r\n\r\n return decoded\r\n\r\n\r\n# Test case 1 -----------------------------------\r\na_great_sentence = 'The bird is the word'\r\nPrint(a_great_sentence)\r\n\r\n# Test case 2 -----------------------------------\r\nt1 = ''\r\nPrint(t1) # will print 'invalid text'\r\n\r\n# Test case 3 -----------------------------------\r\nt2 = 'AAAAAB'\r\nPrint(t2)\r\n\r\n# Test case 4 -----------------------------------\r\nt3 = 'AAAAA'\r\nPrint(t3) # will print 'invalid text'\r\n",
"import sys\nfrom Node import Node\nfrom PriorityQueue import PriorityQueue\n\n\ndef Print(text):\n if text is None or len(text) == 0:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n text_set = set()\n for i in text:\n text_set.add(i)\n if len(text_set) == 1:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n print('The size of the data is: {}\\n'.format(sys.getsizeof(text)))\n print('The content of the data is: {}\\n'.format(text))\n encoded_data, tree = huffman_encoding(text)\n print('The size of the encoded data is: {}\\n'.format(sys.getsizeof(int(\n encoded_data, base=2))))\n print('The content of the encoded data is: {}\\n'.format(encoded_data))\n decoded_data = huffman_decoding(encoded_data, tree)\n print('The size of the decoded data is: {}\\n'.format(sys.getsizeof(\n decoded_data)))\n print('The content of the encoded data is: {}\\n'.format(decoded_data))\n print('--------------------------------------------------------------')\n\n\ndef inorder(root):\n if root is not None:\n inorder(root.left)\n print('Data: ', root.data, 'Freq: ', root.frequency)\n if root.right is not None:\n print('Right: ', root.right.data)\n if root.left is not None:\n print('Left: ', root.left.data)\n inorder(root.right)\n\n\ndef generate_encoded_data(root):\n \"\"\"\n :param root: is a root of huffman tree\n :return: dictionary contains all codes for each letter in the text.\n \"\"\"\n return generate_encoded_data2(root, {}, '')\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\ndef huffman_encoding(data):\n \"\"\"\n :param data: is the text 
that will we encode.\n :return: encoded text as a binary and a root of huffman tree.\n \"\"\"\n if len(data) == 0 or data is None:\n print('Please enter a valid data.')\n return '', None\n min_heap = PriorityQueue()\n count_dic = {}\n for i in range(len(data)):\n if data[i] in count_dic:\n count_dic[data[i]] += 1\n else:\n count_dic[data[i]] = 1\n for i, j in count_dic.items():\n new_node = Node(i, j)\n min_heap.push(new_node, new_node.frequency)\n count: int = 1\n while min_heap.size() >= 2:\n item_1 = min_heap.pop()\n item_2 = min_heap.pop()\n sum_frequency = item_1.frequency + item_2.frequency\n node = Node(count, sum_frequency, item_1, item_2)\n min_heap.push(node, node.frequency)\n count += 1\n root = min_heap.pop()\n codes_ = generate_encoded_data(root)\n encoded = ''\n for char in data:\n if codes_.get(char) is not None:\n encoded += codes_.get(char)\n return encoded, root\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\na_great_sentence = 'The bird is the word'\nPrint(a_great_sentence)\nt1 = ''\nPrint(t1)\nt2 = 'AAAAAB'\nPrint(t2)\nt3 = 'AAAAA'\nPrint(t3)\n",
"<import token>\n\n\ndef Print(text):\n if text is None or len(text) == 0:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n text_set = set()\n for i in text:\n text_set.add(i)\n if len(text_set) == 1:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n print('The size of the data is: {}\\n'.format(sys.getsizeof(text)))\n print('The content of the data is: {}\\n'.format(text))\n encoded_data, tree = huffman_encoding(text)\n print('The size of the encoded data is: {}\\n'.format(sys.getsizeof(int(\n encoded_data, base=2))))\n print('The content of the encoded data is: {}\\n'.format(encoded_data))\n decoded_data = huffman_decoding(encoded_data, tree)\n print('The size of the decoded data is: {}\\n'.format(sys.getsizeof(\n decoded_data)))\n print('The content of the encoded data is: {}\\n'.format(decoded_data))\n print('--------------------------------------------------------------')\n\n\ndef inorder(root):\n if root is not None:\n inorder(root.left)\n print('Data: ', root.data, 'Freq: ', root.frequency)\n if root.right is not None:\n print('Right: ', root.right.data)\n if root.left is not None:\n print('Left: ', root.left.data)\n inorder(root.right)\n\n\ndef generate_encoded_data(root):\n \"\"\"\n :param root: is a root of huffman tree\n :return: dictionary contains all codes for each letter in the text.\n \"\"\"\n return generate_encoded_data2(root, {}, '')\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\ndef huffman_encoding(data):\n \"\"\"\n :param data: is the text that will we encode.\n :return: encoded text as a binary and a 
root of huffman tree.\n \"\"\"\n if len(data) == 0 or data is None:\n print('Please enter a valid data.')\n return '', None\n min_heap = PriorityQueue()\n count_dic = {}\n for i in range(len(data)):\n if data[i] in count_dic:\n count_dic[data[i]] += 1\n else:\n count_dic[data[i]] = 1\n for i, j in count_dic.items():\n new_node = Node(i, j)\n min_heap.push(new_node, new_node.frequency)\n count: int = 1\n while min_heap.size() >= 2:\n item_1 = min_heap.pop()\n item_2 = min_heap.pop()\n sum_frequency = item_1.frequency + item_2.frequency\n node = Node(count, sum_frequency, item_1, item_2)\n min_heap.push(node, node.frequency)\n count += 1\n root = min_heap.pop()\n codes_ = generate_encoded_data(root)\n encoded = ''\n for char in data:\n if codes_.get(char) is not None:\n encoded += codes_.get(char)\n return encoded, root\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\na_great_sentence = 'The bird is the word'\nPrint(a_great_sentence)\nt1 = ''\nPrint(t1)\nt2 = 'AAAAAB'\nPrint(t2)\nt3 = 'AAAAA'\nPrint(t3)\n",
"<import token>\n\n\ndef Print(text):\n if text is None or len(text) == 0:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n text_set = set()\n for i in text:\n text_set.add(i)\n if len(text_set) == 1:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n print('The size of the data is: {}\\n'.format(sys.getsizeof(text)))\n print('The content of the data is: {}\\n'.format(text))\n encoded_data, tree = huffman_encoding(text)\n print('The size of the encoded data is: {}\\n'.format(sys.getsizeof(int(\n encoded_data, base=2))))\n print('The content of the encoded data is: {}\\n'.format(encoded_data))\n decoded_data = huffman_decoding(encoded_data, tree)\n print('The size of the decoded data is: {}\\n'.format(sys.getsizeof(\n decoded_data)))\n print('The content of the encoded data is: {}\\n'.format(decoded_data))\n print('--------------------------------------------------------------')\n\n\ndef inorder(root):\n if root is not None:\n inorder(root.left)\n print('Data: ', root.data, 'Freq: ', root.frequency)\n if root.right is not None:\n print('Right: ', root.right.data)\n if root.left is not None:\n print('Left: ', root.left.data)\n inorder(root.right)\n\n\ndef generate_encoded_data(root):\n \"\"\"\n :param root: is a root of huffman tree\n :return: dictionary contains all codes for each letter in the text.\n \"\"\"\n return generate_encoded_data2(root, {}, '')\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\ndef huffman_encoding(data):\n \"\"\"\n :param data: is the text that will we encode.\n :return: encoded text as a binary and a 
root of huffman tree.\n \"\"\"\n if len(data) == 0 or data is None:\n print('Please enter a valid data.')\n return '', None\n min_heap = PriorityQueue()\n count_dic = {}\n for i in range(len(data)):\n if data[i] in count_dic:\n count_dic[data[i]] += 1\n else:\n count_dic[data[i]] = 1\n for i, j in count_dic.items():\n new_node = Node(i, j)\n min_heap.push(new_node, new_node.frequency)\n count: int = 1\n while min_heap.size() >= 2:\n item_1 = min_heap.pop()\n item_2 = min_heap.pop()\n sum_frequency = item_1.frequency + item_2.frequency\n node = Node(count, sum_frequency, item_1, item_2)\n min_heap.push(node, node.frequency)\n count += 1\n root = min_heap.pop()\n codes_ = generate_encoded_data(root)\n encoded = ''\n for char in data:\n if codes_.get(char) is not None:\n encoded += codes_.get(char)\n return encoded, root\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\n<assignment token>\nPrint(a_great_sentence)\n<assignment token>\nPrint(t1)\n<assignment token>\nPrint(t2)\n<assignment token>\nPrint(t3)\n",
"<import token>\n\n\ndef Print(text):\n if text is None or len(text) == 0:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n text_set = set()\n for i in text:\n text_set.add(i)\n if len(text_set) == 1:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n print('The size of the data is: {}\\n'.format(sys.getsizeof(text)))\n print('The content of the data is: {}\\n'.format(text))\n encoded_data, tree = huffman_encoding(text)\n print('The size of the encoded data is: {}\\n'.format(sys.getsizeof(int(\n encoded_data, base=2))))\n print('The content of the encoded data is: {}\\n'.format(encoded_data))\n decoded_data = huffman_decoding(encoded_data, tree)\n print('The size of the decoded data is: {}\\n'.format(sys.getsizeof(\n decoded_data)))\n print('The content of the encoded data is: {}\\n'.format(decoded_data))\n print('--------------------------------------------------------------')\n\n\ndef inorder(root):\n if root is not None:\n inorder(root.left)\n print('Data: ', root.data, 'Freq: ', root.frequency)\n if root.right is not None:\n print('Right: ', root.right.data)\n if root.left is not None:\n print('Left: ', root.left.data)\n inorder(root.right)\n\n\ndef generate_encoded_data(root):\n \"\"\"\n :param root: is a root of huffman tree\n :return: dictionary contains all codes for each letter in the text.\n \"\"\"\n return generate_encoded_data2(root, {}, '')\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\ndef huffman_encoding(data):\n \"\"\"\n :param data: is the text that will we encode.\n :return: encoded text as a binary and a 
root of huffman tree.\n \"\"\"\n if len(data) == 0 or data is None:\n print('Please enter a valid data.')\n return '', None\n min_heap = PriorityQueue()\n count_dic = {}\n for i in range(len(data)):\n if data[i] in count_dic:\n count_dic[data[i]] += 1\n else:\n count_dic[data[i]] = 1\n for i, j in count_dic.items():\n new_node = Node(i, j)\n min_heap.push(new_node, new_node.frequency)\n count: int = 1\n while min_heap.size() >= 2:\n item_1 = min_heap.pop()\n item_2 = min_heap.pop()\n sum_frequency = item_1.frequency + item_2.frequency\n node = Node(count, sum_frequency, item_1, item_2)\n min_heap.push(node, node.frequency)\n count += 1\n root = min_heap.pop()\n codes_ = generate_encoded_data(root)\n encoded = ''\n for char in data:\n if codes_.get(char) is not None:\n encoded += codes_.get(char)\n return encoded, root\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Print(text):\n if text is None or len(text) == 0:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n text_set = set()\n for i in text:\n text_set.add(i)\n if len(text_set) == 1:\n print('invalid text.')\n print('--------------------------------------------------------------')\n return\n print('The size of the data is: {}\\n'.format(sys.getsizeof(text)))\n print('The content of the data is: {}\\n'.format(text))\n encoded_data, tree = huffman_encoding(text)\n print('The size of the encoded data is: {}\\n'.format(sys.getsizeof(int(\n encoded_data, base=2))))\n print('The content of the encoded data is: {}\\n'.format(encoded_data))\n decoded_data = huffman_decoding(encoded_data, tree)\n print('The size of the decoded data is: {}\\n'.format(sys.getsizeof(\n decoded_data)))\n print('The content of the encoded data is: {}\\n'.format(decoded_data))\n print('--------------------------------------------------------------')\n\n\ndef inorder(root):\n if root is not None:\n inorder(root.left)\n print('Data: ', root.data, 'Freq: ', root.frequency)\n if root.right is not None:\n print('Right: ', root.right.data)\n if root.left is not None:\n print('Left: ', root.left.data)\n inorder(root.right)\n\n\ndef generate_encoded_data(root):\n \"\"\"\n :param root: is a root of huffman tree\n :return: dictionary contains all codes for each letter in the text.\n \"\"\"\n return generate_encoded_data2(root, {}, '')\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\n<function token>\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: 
is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n\n\ndef inorder(root):\n if root is not None:\n inorder(root.left)\n print('Data: ', root.data, 'Freq: ', root.frequency)\n if root.right is not None:\n print('Right: ', root.right.data)\n if root.left is not None:\n print('Left: ', root.left.data)\n inorder(root.right)\n\n\ndef generate_encoded_data(root):\n \"\"\"\n :param root: is a root of huffman tree\n :return: dictionary contains all codes for each letter in the text.\n \"\"\"\n return generate_encoded_data2(root, {}, '')\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\n<function token>\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n\n\ndef generate_encoded_data(root):\n \"\"\"\n :param root: is a root of huffman tree\n :return: dictionary contains all codes for each letter in the text.\n \"\"\"\n return generate_encoded_data2(root, {}, '')\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\n<function token>\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\ndef generate_encoded_data2(root, dic, code):\n if root is not None:\n if root.left is not None:\n s = code + '0'\n generate_encoded_data2(root.left, dic, s)\n if str(root.data).isalpha() or root.data == ' ':\n dic.update({root.data: code})\n if root.right is not None:\n s = code + '1'\n generate_encoded_data2(root.right, dic, s)\n return dic\n else:\n return None\n\n\n<function token>\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef huffman_decoding(data, root):\n \"\"\"\n :param data: is the encoded text as a binary.\n :param root: is the root of huffman tree.\n :return: the decoded data.\n \"\"\"\n if len(data) == 0:\n print('Please enter a valid data.')\n return '', None\n decoded = ''\n i = 0\n curr = root\n while i < len(data):\n \"\"\"\n If the current bit of encoded data is 0, move to the left child, else move to the right child of the tree if\n the current bit is 1.\n \"\"\"\n if data[i] == '0':\n curr = curr.left\n else:\n curr = curr.right\n i += 1\n if curr.is_leaf():\n decoded += curr.data\n curr = root\n return decoded\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,718 |
ae7fc034249b7dde6d6bca33e2e6c8f464284cfc
|
#!/usr/bin/env python3
import datetime
import time
import board
from busio import I2C
import adafruit_bme680
# Create library object using our Bus I2C port
i2c = I2C(board.SCL, board.SDA)
bme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)
# change this to match the location's pressure (hPa) at sea level
bme680.sea_level_pressure = 1006.0
file = open("/home/pi/Payload/src/sensory/burning_data.txt","a")
while True:
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
file.write("\ntimestamp: %s " % st)
print("\ntimestamp: %s " % st)
print("Temperature: %0.1f C" % bme680.temperature)
file.write("Temperature: %0.1f C" % bme680.temperature)
print("Gas: %d ohm" % bme680.gas)
file.write("Gas: %d ohm" % bme680.gas)
print("Humidity: %0.1f %%" % bme680.humidity)
file.write("Humidity: %0.1f %%" % bme680.humidity)
print("Pressure: %0.3f hPa" % bme680.pressure)
file.write("Pressure: %0.3f hPa" % bme680.pressure)
print("Altitude = %0.2f meters" % bme680.altitude)
file.write("Altitude = %0.2f meters" % bme680.altitude)
time.sleep(1)
#>>> st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
#import datetime
|
[
"#!/usr/bin/env python3\nimport datetime\nimport time\nimport board\nfrom busio import I2C\nimport adafruit_bme680\n\n# Create library object using our Bus I2C port\ni2c = I2C(board.SCL, board.SDA)\nbme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)\n\n# change this to match the location's pressure (hPa) at sea level\nbme680.sea_level_pressure = 1006.0\nfile = open(\"/home/pi/Payload/src/sensory/burning_data.txt\",\"a\")\n\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write(\"\\ntimestamp: %s \" % st)\n print(\"\\ntimestamp: %s \" % st)\n print(\"Temperature: %0.1f C\" % bme680.temperature)\n file.write(\"Temperature: %0.1f C\" % bme680.temperature)\n print(\"Gas: %d ohm\" % bme680.gas)\n file.write(\"Gas: %d ohm\" % bme680.gas)\n print(\"Humidity: %0.1f %%\" % bme680.humidity)\n file.write(\"Humidity: %0.1f %%\" % bme680.humidity)\n print(\"Pressure: %0.3f hPa\" % bme680.pressure)\n file.write(\"Pressure: %0.3f hPa\" % bme680.pressure)\n print(\"Altitude = %0.2f meters\" % bme680.altitude)\n file.write(\"Altitude = %0.2f meters\" % bme680.altitude)\n time.sleep(1)\n\n\n#>>> st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n#import datetime\n",
"import datetime\nimport time\nimport board\nfrom busio import I2C\nimport adafruit_bme680\ni2c = I2C(board.SCL, board.SDA)\nbme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)\nbme680.sea_level_pressure = 1006.0\nfile = open('/home/pi/Payload/src/sensory/burning_data.txt', 'a')\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write('\\ntimestamp: %s ' % st)\n print('\\ntimestamp: %s ' % st)\n print('Temperature: %0.1f C' % bme680.temperature)\n file.write('Temperature: %0.1f C' % bme680.temperature)\n print('Gas: %d ohm' % bme680.gas)\n file.write('Gas: %d ohm' % bme680.gas)\n print('Humidity: %0.1f %%' % bme680.humidity)\n file.write('Humidity: %0.1f %%' % bme680.humidity)\n print('Pressure: %0.3f hPa' % bme680.pressure)\n file.write('Pressure: %0.3f hPa' % bme680.pressure)\n print('Altitude = %0.2f meters' % bme680.altitude)\n file.write('Altitude = %0.2f meters' % bme680.altitude)\n time.sleep(1)\n",
"<import token>\ni2c = I2C(board.SCL, board.SDA)\nbme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)\nbme680.sea_level_pressure = 1006.0\nfile = open('/home/pi/Payload/src/sensory/burning_data.txt', 'a')\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write('\\ntimestamp: %s ' % st)\n print('\\ntimestamp: %s ' % st)\n print('Temperature: %0.1f C' % bme680.temperature)\n file.write('Temperature: %0.1f C' % bme680.temperature)\n print('Gas: %d ohm' % bme680.gas)\n file.write('Gas: %d ohm' % bme680.gas)\n print('Humidity: %0.1f %%' % bme680.humidity)\n file.write('Humidity: %0.1f %%' % bme680.humidity)\n print('Pressure: %0.3f hPa' % bme680.pressure)\n file.write('Pressure: %0.3f hPa' % bme680.pressure)\n print('Altitude = %0.2f meters' % bme680.altitude)\n file.write('Altitude = %0.2f meters' % bme680.altitude)\n time.sleep(1)\n",
"<import token>\n<assignment token>\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write('\\ntimestamp: %s ' % st)\n print('\\ntimestamp: %s ' % st)\n print('Temperature: %0.1f C' % bme680.temperature)\n file.write('Temperature: %0.1f C' % bme680.temperature)\n print('Gas: %d ohm' % bme680.gas)\n file.write('Gas: %d ohm' % bme680.gas)\n print('Humidity: %0.1f %%' % bme680.humidity)\n file.write('Humidity: %0.1f %%' % bme680.humidity)\n print('Pressure: %0.3f hPa' % bme680.pressure)\n file.write('Pressure: %0.3f hPa' % bme680.pressure)\n print('Altitude = %0.2f meters' % bme680.altitude)\n file.write('Altitude = %0.2f meters' % bme680.altitude)\n time.sleep(1)\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
9,719 |
76664114382bdeb0bffb996e4dd4448b6c87520d
|
import sys
def ler (t):
i =0
for s in sys.stdin:
l=s.split(" ")
t.append(l)
def melhor (t):
i=1
x=int(t[0][0].strip("\n"))
n=len(t)
while(i<n):
u=int((t[i][2]).strip())
if(u<x)
i+=1
def vendedor():
t=[]
ler(t)
melhor(t)
vendedor()
|
[
"import sys \n\ndef ler (t):\n\ti =0\n\tfor s in sys.stdin:\n\t\tl=s.split(\" \")\n\t\tt.append(l)\n\ndef melhor (t):\n\ti=1\n\tx=int(t[0][0].strip(\"\\n\"))\n\tn=len(t)\n\twhile(i<n):\n\t\tu=int((t[i][2]).strip())\n\t\tif(u<x)\n\t\ti+=1\n\n\n\n\ndef vendedor():\n\tt=[]\n\tler(t)\n\tmelhor(t)\nvendedor()"
] | true |
9,720 |
e0874554c326bb11b53552e362bc8073bb57bc93
|
import widget
import column
class Columns(widget.Widget):
def __init__(self, parent):
super(Columns, self).__init__(parent)
# self.root.mouse.on_drag_release.append(self.on_drag_release)
"""
def on_drag_release(self, x0, y0, x, y):
if not self.contains_point(x0, y0):
return None
if not self.contains_point(x, y):
return None
idx_from = self.get_child_idx_at_point(x0, y0)
idx_to = self.get_child_idx_at_point(x, y)
self.reorder_children(idx_from, idx_to)
"""
def add_column(self):
col = column.Column(self)
self.children.append(col)
def update(self):
area_sum = len(self.children)
ratio_accumulator = 0
for child in self.children:
area_share = 1
area_ratio = float(area_share)/area_sum
x = self.x + ratio_accumulator * self.dx
y = self.y
dx = area_ratio * self.dx
dy = self.dy
child.resize(x, y, dx, dy)
ratio_accumulator += area_ratio
def on_reposition_window(self, target, x, y):
if self.contains_point(x, y):
windows_from = target.parent
windows_to = self.get_child_at_point(x, y).windows
windows_from.detach_child(target)
idx_to = windows_to.get_child_idx_at_point(x, y)
if idx_to is None:
idx_to = 0
else:
idx_to += 1
windows_to.attach_child_at_idx(idx_to, target)
return False
|
[
"import widget\nimport column\n\nclass Columns(widget.Widget):\n def __init__(self, parent):\n super(Columns, self).__init__(parent)\n # self.root.mouse.on_drag_release.append(self.on_drag_release)\n\n \"\"\"\n def on_drag_release(self, x0, y0, x, y):\n if not self.contains_point(x0, y0):\n return None\n if not self.contains_point(x, y):\n return None\n\n idx_from = self.get_child_idx_at_point(x0, y0)\n idx_to = self.get_child_idx_at_point(x, y)\n\n self.reorder_children(idx_from, idx_to)\n \"\"\"\n \n\n def add_column(self):\n col = column.Column(self)\n self.children.append(col)\n\n\n def update(self):\n area_sum = len(self.children)\n ratio_accumulator = 0\n\n for child in self.children:\n area_share = 1\n area_ratio = float(area_share)/area_sum\n\n x = self.x + ratio_accumulator * self.dx\n y = self.y\n dx = area_ratio * self.dx\n dy = self.dy\n\n child.resize(x, y, dx, dy)\n\n ratio_accumulator += area_ratio\n\n def on_reposition_window(self, target, x, y):\n if self.contains_point(x, y):\n windows_from = target.parent\n windows_to = self.get_child_at_point(x, y).windows\n windows_from.detach_child(target)\n idx_to = windows_to.get_child_idx_at_point(x, y)\n if idx_to is None:\n idx_to = 0\n else:\n idx_to += 1\n windows_to.attach_child_at_idx(idx_to, target)\n return False\n\n",
"import widget\nimport column\n\n\nclass Columns(widget.Widget):\n\n def __init__(self, parent):\n super(Columns, self).__init__(parent)\n \"\"\"\n def on_drag_release(self, x0, y0, x, y):\n if not self.contains_point(x0, y0):\n return None\n if not self.contains_point(x, y):\n return None\n\n idx_from = self.get_child_idx_at_point(x0, y0)\n idx_to = self.get_child_idx_at_point(x, y)\n\n self.reorder_children(idx_from, idx_to)\n \"\"\"\n\n def add_column(self):\n col = column.Column(self)\n self.children.append(col)\n\n def update(self):\n area_sum = len(self.children)\n ratio_accumulator = 0\n for child in self.children:\n area_share = 1\n area_ratio = float(area_share) / area_sum\n x = self.x + ratio_accumulator * self.dx\n y = self.y\n dx = area_ratio * self.dx\n dy = self.dy\n child.resize(x, y, dx, dy)\n ratio_accumulator += area_ratio\n\n def on_reposition_window(self, target, x, y):\n if self.contains_point(x, y):\n windows_from = target.parent\n windows_to = self.get_child_at_point(x, y).windows\n windows_from.detach_child(target)\n idx_to = windows_to.get_child_idx_at_point(x, y)\n if idx_to is None:\n idx_to = 0\n else:\n idx_to += 1\n windows_to.attach_child_at_idx(idx_to, target)\n return False\n",
"<import token>\n\n\nclass Columns(widget.Widget):\n\n def __init__(self, parent):\n super(Columns, self).__init__(parent)\n \"\"\"\n def on_drag_release(self, x0, y0, x, y):\n if not self.contains_point(x0, y0):\n return None\n if not self.contains_point(x, y):\n return None\n\n idx_from = self.get_child_idx_at_point(x0, y0)\n idx_to = self.get_child_idx_at_point(x, y)\n\n self.reorder_children(idx_from, idx_to)\n \"\"\"\n\n def add_column(self):\n col = column.Column(self)\n self.children.append(col)\n\n def update(self):\n area_sum = len(self.children)\n ratio_accumulator = 0\n for child in self.children:\n area_share = 1\n area_ratio = float(area_share) / area_sum\n x = self.x + ratio_accumulator * self.dx\n y = self.y\n dx = area_ratio * self.dx\n dy = self.dy\n child.resize(x, y, dx, dy)\n ratio_accumulator += area_ratio\n\n def on_reposition_window(self, target, x, y):\n if self.contains_point(x, y):\n windows_from = target.parent\n windows_to = self.get_child_at_point(x, y).windows\n windows_from.detach_child(target)\n idx_to = windows_to.get_child_idx_at_point(x, y)\n if idx_to is None:\n idx_to = 0\n else:\n idx_to += 1\n windows_to.attach_child_at_idx(idx_to, target)\n return False\n",
"<import token>\n\n\nclass Columns(widget.Widget):\n\n def __init__(self, parent):\n super(Columns, self).__init__(parent)\n <docstring token>\n\n def add_column(self):\n col = column.Column(self)\n self.children.append(col)\n\n def update(self):\n area_sum = len(self.children)\n ratio_accumulator = 0\n for child in self.children:\n area_share = 1\n area_ratio = float(area_share) / area_sum\n x = self.x + ratio_accumulator * self.dx\n y = self.y\n dx = area_ratio * self.dx\n dy = self.dy\n child.resize(x, y, dx, dy)\n ratio_accumulator += area_ratio\n\n def on_reposition_window(self, target, x, y):\n if self.contains_point(x, y):\n windows_from = target.parent\n windows_to = self.get_child_at_point(x, y).windows\n windows_from.detach_child(target)\n idx_to = windows_to.get_child_idx_at_point(x, y)\n if idx_to is None:\n idx_to = 0\n else:\n idx_to += 1\n windows_to.attach_child_at_idx(idx_to, target)\n return False\n",
"<import token>\n\n\nclass Columns(widget.Widget):\n\n def __init__(self, parent):\n super(Columns, self).__init__(parent)\n <docstring token>\n\n def add_column(self):\n col = column.Column(self)\n self.children.append(col)\n <function token>\n\n def on_reposition_window(self, target, x, y):\n if self.contains_point(x, y):\n windows_from = target.parent\n windows_to = self.get_child_at_point(x, y).windows\n windows_from.detach_child(target)\n idx_to = windows_to.get_child_idx_at_point(x, y)\n if idx_to is None:\n idx_to = 0\n else:\n idx_to += 1\n windows_to.attach_child_at_idx(idx_to, target)\n return False\n",
"<import token>\n\n\nclass Columns(widget.Widget):\n <function token>\n <docstring token>\n\n def add_column(self):\n col = column.Column(self)\n self.children.append(col)\n <function token>\n\n def on_reposition_window(self, target, x, y):\n if self.contains_point(x, y):\n windows_from = target.parent\n windows_to = self.get_child_at_point(x, y).windows\n windows_from.detach_child(target)\n idx_to = windows_to.get_child_idx_at_point(x, y)\n if idx_to is None:\n idx_to = 0\n else:\n idx_to += 1\n windows_to.attach_child_at_idx(idx_to, target)\n return False\n",
"<import token>\n\n\nclass Columns(widget.Widget):\n <function token>\n <docstring token>\n\n def add_column(self):\n col = column.Column(self)\n self.children.append(col)\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Columns(widget.Widget):\n <function token>\n <docstring token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
9,721 |
843901b65a556e57470f73be2657e9fd3c0facc6
|
def parse(num):
strnum = str(num)
words = []
for item in range(len(strnum)-1, -1, -1):
words.append(strnum[item])
hundred = words[:3]
thousand = words[3:6]
million = words[6:len(words)]
hundred = hundred[::-1]
thousand = thousand[::-1]
million = million[::-1]
units = ['zero','one','two','three','four','five','six','seven','eight','nine']
tens = ['ten','eleven','twelve','thirteen','fourteen','fifteen','sixteen','seventeen','eighteen','nineteen']
tens_more = ['zero','ten','twenty','thirty','forty','fifty','sixty','seventy','eighty','ninety']
reads = []
if len(million)>0:
if len(million)==3:
num = int(million[0])
reads.append(units[num])
reads.append('hundred')
reads.append('and')
num = int(million[1])
if num>1:
reads.append(tens_more[num])
if num!=0:
num = int(million[2])
reads.append(units[num])
else:
num = int(million[1])
reads.append(tens[num])
if len(million)==2:
num = int(million[0])
if num>1:
reads.append(tens_more[num])
num = int(million[1])
if num!=0:
reads.append(units[num])
else:
num = int(million[1])
reads.append(tens[num])
if len(million)==1:
num = int(million[0])
reads.append(units[num])
reads.append('million')
reads.append('and')
if __name__ == "__main__":
parse(23456789)
|
[
"def parse(num):\n strnum = str(num)\n words = []\n for item in range(len(strnum)-1, -1, -1):\n words.append(strnum[item])\n\n hundred = words[:3]\n thousand = words[3:6]\n million = words[6:len(words)]\n\n hundred = hundred[::-1]\n thousand = thousand[::-1]\n million = million[::-1]\n\n units = ['zero','one','two','three','four','five','six','seven','eight','nine']\n tens = ['ten','eleven','twelve','thirteen','fourteen','fifteen','sixteen','seventeen','eighteen','nineteen']\n tens_more = ['zero','ten','twenty','thirty','forty','fifty','sixty','seventy','eighty','ninety']\n\n reads = []\n if len(million)>0:\n if len(million)==3:\n num = int(million[0])\n reads.append(units[num])\n reads.append('hundred')\n reads.append('and')\n\n num = int(million[1])\n if num>1:\n reads.append(tens_more[num])\n if num!=0:\n num = int(million[2])\n reads.append(units[num])\n else:\n num = int(million[1])\n reads.append(tens[num])\n\n if len(million)==2:\n num = int(million[0])\n if num>1:\n reads.append(tens_more[num])\n num = int(million[1])\n if num!=0:\n reads.append(units[num])\n else:\n num = int(million[1])\n reads.append(tens[num])\n \n if len(million)==1:\n num = int(million[0])\n reads.append(units[num])\n\n reads.append('million')\n reads.append('and')\n\nif __name__ == \"__main__\":\n parse(23456789)",
"def parse(num):\n strnum = str(num)\n words = []\n for item in range(len(strnum) - 1, -1, -1):\n words.append(strnum[item])\n hundred = words[:3]\n thousand = words[3:6]\n million = words[6:len(words)]\n hundred = hundred[::-1]\n thousand = thousand[::-1]\n million = million[::-1]\n units = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven',\n 'eight', 'nine']\n tens = ['ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen',\n 'sixteen', 'seventeen', 'eighteen', 'nineteen']\n tens_more = ['zero', 'ten', 'twenty', 'thirty', 'forty', 'fifty',\n 'sixty', 'seventy', 'eighty', 'ninety']\n reads = []\n if len(million) > 0:\n if len(million) == 3:\n num = int(million[0])\n reads.append(units[num])\n reads.append('hundred')\n reads.append('and')\n num = int(million[1])\n if num > 1:\n reads.append(tens_more[num])\n if num != 0:\n num = int(million[2])\n reads.append(units[num])\n else:\n num = int(million[1])\n reads.append(tens[num])\n if len(million) == 2:\n num = int(million[0])\n if num > 1:\n reads.append(tens_more[num])\n num = int(million[1])\n if num != 0:\n reads.append(units[num])\n else:\n num = int(million[1])\n reads.append(tens[num])\n if len(million) == 1:\n num = int(million[0])\n reads.append(units[num])\n reads.append('million')\n reads.append('and')\n\n\nif __name__ == '__main__':\n parse(23456789)\n",
"def parse(num):\n strnum = str(num)\n words = []\n for item in range(len(strnum) - 1, -1, -1):\n words.append(strnum[item])\n hundred = words[:3]\n thousand = words[3:6]\n million = words[6:len(words)]\n hundred = hundred[::-1]\n thousand = thousand[::-1]\n million = million[::-1]\n units = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven',\n 'eight', 'nine']\n tens = ['ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen',\n 'sixteen', 'seventeen', 'eighteen', 'nineteen']\n tens_more = ['zero', 'ten', 'twenty', 'thirty', 'forty', 'fifty',\n 'sixty', 'seventy', 'eighty', 'ninety']\n reads = []\n if len(million) > 0:\n if len(million) == 3:\n num = int(million[0])\n reads.append(units[num])\n reads.append('hundred')\n reads.append('and')\n num = int(million[1])\n if num > 1:\n reads.append(tens_more[num])\n if num != 0:\n num = int(million[2])\n reads.append(units[num])\n else:\n num = int(million[1])\n reads.append(tens[num])\n if len(million) == 2:\n num = int(million[0])\n if num > 1:\n reads.append(tens_more[num])\n num = int(million[1])\n if num != 0:\n reads.append(units[num])\n else:\n num = int(million[1])\n reads.append(tens[num])\n if len(million) == 1:\n num = int(million[0])\n reads.append(units[num])\n reads.append('million')\n reads.append('and')\n\n\n<code token>\n",
"<function token>\n<code token>\n"
] | false |
9,722 |
b2a2e06c5db8b12acbc852bafc4ea869b006c1c8
|
import itertools
import urllib
import word2vec
# MSD: http://corpus.leeds.ac.uk/mocky/ru-table.tab
# Universal: http://universaldependencies.org/ru/pos/index.html
def convert_pos_MSD_to_Universal(pos):
if pos.startswith('A'):
return 'ADJ'
elif pos.startswith('C'):
return 'CCONJ'
elif pos.startswith('I'):
return 'INTJ'
elif pos.startswith('M'):
return 'NUM'
elif pos.startswith('Nc'):
return 'NOUN'
elif pos.startswith('Np'):
return 'PROPN'
elif pos.startswith('N'):
return 'NOUN'
elif pos.startswith('P'):
return 'PRON' # TODO: or DET
elif pos.startswith('Q'):
return 'PART'
elif pos.startswith('R'):
return 'ADV'
elif pos.startswith('S'):
return 'ADP'
elif pos.startswith('V'):
return 'VERB' # TODO: or AUX
elif pos.startswith('SENT') or pos.startswith('PUNC'):
return 'PUNCT'
else:
return 'X'
# ------------------
# get_dep_tree(sentence)
# ---
# Creates a word dependency tree from a sentence.
# Returns: deptree=(node, [deptree])
# Creates a deptree from the webservice response dictionary
def make_dep_tree(respDict, idx):
if idx == 0:
el = None
else:
el = respDict[idx]
children = [(k, respDict[k]) for k in respDict if int(respDict[k][6]) == idx]
childTrees = [ make_dep_tree(respDict, k) for (k, c) in children ]
return (el, childTrees)
def get_dep_tree(sentence):
url = 'http://deptree.jental.name/parse?' + urllib.parse.urlencode({'text': sentence})
respRaw = urllib.request.urlopen(url)
resp = respRaw.read()
respStr = resp.decode('utf-8')
respList = [ r[1:-1].split('\\t') for r in respStr[1:-1].split(',') ]
respDict = dict([(int(r[0]), r + [convert_pos_MSD_to_Universal(r[5])]) for r in respList])
(root, trees) = make_dep_tree(respDict, 0)
if len(trees) == 0:
print('No tree', sentence, trees)
return None
else:
return trees[0]
# ------------------
# filter_dep_tree(tree)
# ---
# Filters out invaluable parts of speech.
# Returns: deptree=(node, [deptree])
def filter_dep_tree(tree):
root, children = tree
posp = convert_pos_MSD_to_Universal(root[3])
if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or posp == 'ADV' or posp == 'VERB'):
res = [ (root, list(itertools.chain.from_iterable([ filter_dep_tree(c) for c in children ]))) ]
else:
cd = [ filter_dep_tree(c) for c in children ]
if len(cd) > 0:
res = list(itertools.chain.from_iterable(cd))
else:
res = []
return res
# ------------------
# filter_dep_tree(tree)
# ---
# Prints a word dependency tree
def print_dep_tree(tree):
def pdt(t, offset):
root, children = t
print(''.join([ ' ' for i in range(0, offset) ]), root[1], root[3])
for c in children:
pdt(c, offset + 1)
pdt(tree, 0)
|
[
"import itertools\n\nimport urllib\n\nimport word2vec\n\n# MSD: http://corpus.leeds.ac.uk/mocky/ru-table.tab\n# Universal: http://universaldependencies.org/ru/pos/index.html\ndef convert_pos_MSD_to_Universal(pos):\n if pos.startswith('A'):\n return 'ADJ'\n elif pos.startswith('C'):\n return 'CCONJ'\n elif pos.startswith('I'):\n return 'INTJ'\n elif pos.startswith('M'):\n return 'NUM'\n elif pos.startswith('Nc'):\n return 'NOUN'\n elif pos.startswith('Np'):\n return 'PROPN'\n elif pos.startswith('N'):\n return 'NOUN'\n elif pos.startswith('P'):\n return 'PRON' # TODO: or DET \n elif pos.startswith('Q'):\n return 'PART'\n elif pos.startswith('R'):\n return 'ADV'\n elif pos.startswith('S'):\n return 'ADP'\n elif pos.startswith('V'):\n return 'VERB' # TODO: or AUX\n elif pos.startswith('SENT') or pos.startswith('PUNC'):\n return 'PUNCT'\n else:\n return 'X'\n\n# ------------------\n# get_dep_tree(sentence)\n# ---\n# Creates a word dependency tree from a sentence.\n# Returns: deptree=(node, [deptree])\n\n# Creates a deptree from the webservice response dictionary\ndef make_dep_tree(respDict, idx):\n if idx == 0:\n el = None\n else:\n el = respDict[idx]\n children = [(k, respDict[k]) for k in respDict if int(respDict[k][6]) == idx]\n childTrees = [ make_dep_tree(respDict, k) for (k, c) in children ]\n\n return (el, childTrees)\n\ndef get_dep_tree(sentence):\n url = 'http://deptree.jental.name/parse?' 
+ urllib.parse.urlencode({'text': sentence})\n respRaw = urllib.request.urlopen(url)\n resp = respRaw.read()\n respStr = resp.decode('utf-8')\n respList = [ r[1:-1].split('\\\\t') for r in respStr[1:-1].split(',') ]\n respDict = dict([(int(r[0]), r + [convert_pos_MSD_to_Universal(r[5])]) for r in respList])\n\n (root, trees) = make_dep_tree(respDict, 0)\n\n if len(trees) == 0:\n print('No tree', sentence, trees)\n return None\n else:\n return trees[0]\n\n# ------------------\n# filter_dep_tree(tree)\n# ---\n# Filters out invaluable parts of speech.\n# Returns: deptree=(node, [deptree])\n\ndef filter_dep_tree(tree):\n root, children = tree\n posp = convert_pos_MSD_to_Universal(root[3])\n if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or posp == 'ADV' or posp == 'VERB'):\n res = [ (root, list(itertools.chain.from_iterable([ filter_dep_tree(c) for c in children ]))) ]\n else:\n cd = [ filter_dep_tree(c) for c in children ]\n if len(cd) > 0:\n res = list(itertools.chain.from_iterable(cd))\n else:\n res = []\n return res\n\n# ------------------\n# filter_dep_tree(tree)\n# ---\n# Prints a word dependency tree\n\ndef print_dep_tree(tree):\n def pdt(t, offset):\n root, children = t\n print(''.join([ ' ' for i in range(0, offset) ]), root[1], root[3])\n for c in children:\n pdt(c, offset + 1)\n pdt(tree, 0)\n",
"import itertools\nimport urllib\nimport word2vec\n\n\ndef convert_pos_MSD_to_Universal(pos):\n if pos.startswith('A'):\n return 'ADJ'\n elif pos.startswith('C'):\n return 'CCONJ'\n elif pos.startswith('I'):\n return 'INTJ'\n elif pos.startswith('M'):\n return 'NUM'\n elif pos.startswith('Nc'):\n return 'NOUN'\n elif pos.startswith('Np'):\n return 'PROPN'\n elif pos.startswith('N'):\n return 'NOUN'\n elif pos.startswith('P'):\n return 'PRON'\n elif pos.startswith('Q'):\n return 'PART'\n elif pos.startswith('R'):\n return 'ADV'\n elif pos.startswith('S'):\n return 'ADP'\n elif pos.startswith('V'):\n return 'VERB'\n elif pos.startswith('SENT') or pos.startswith('PUNC'):\n return 'PUNCT'\n else:\n return 'X'\n\n\ndef make_dep_tree(respDict, idx):\n if idx == 0:\n el = None\n else:\n el = respDict[idx]\n children = [(k, respDict[k]) for k in respDict if int(respDict[k][6]) ==\n idx]\n childTrees = [make_dep_tree(respDict, k) for k, c in children]\n return el, childTrees\n\n\ndef get_dep_tree(sentence):\n url = 'http://deptree.jental.name/parse?' 
+ urllib.parse.urlencode({\n 'text': sentence})\n respRaw = urllib.request.urlopen(url)\n resp = respRaw.read()\n respStr = resp.decode('utf-8')\n respList = [r[1:-1].split('\\\\t') for r in respStr[1:-1].split(',')]\n respDict = dict([(int(r[0]), r + [convert_pos_MSD_to_Universal(r[5])]) for\n r in respList])\n root, trees = make_dep_tree(respDict, 0)\n if len(trees) == 0:\n print('No tree', sentence, trees)\n return None\n else:\n return trees[0]\n\n\ndef filter_dep_tree(tree):\n root, children = tree\n posp = convert_pos_MSD_to_Universal(root[3])\n if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or\n posp == 'ADV' or posp == 'VERB'):\n res = [(root, list(itertools.chain.from_iterable([filter_dep_tree(c\n ) for c in children])))]\n else:\n cd = [filter_dep_tree(c) for c in children]\n if len(cd) > 0:\n res = list(itertools.chain.from_iterable(cd))\n else:\n res = []\n return res\n\n\ndef print_dep_tree(tree):\n\n def pdt(t, offset):\n root, children = t\n print(''.join([' ' for i in range(0, offset)]), root[1], root[3])\n for c in children:\n pdt(c, offset + 1)\n pdt(tree, 0)\n",
"<import token>\n\n\ndef convert_pos_MSD_to_Universal(pos):\n if pos.startswith('A'):\n return 'ADJ'\n elif pos.startswith('C'):\n return 'CCONJ'\n elif pos.startswith('I'):\n return 'INTJ'\n elif pos.startswith('M'):\n return 'NUM'\n elif pos.startswith('Nc'):\n return 'NOUN'\n elif pos.startswith('Np'):\n return 'PROPN'\n elif pos.startswith('N'):\n return 'NOUN'\n elif pos.startswith('P'):\n return 'PRON'\n elif pos.startswith('Q'):\n return 'PART'\n elif pos.startswith('R'):\n return 'ADV'\n elif pos.startswith('S'):\n return 'ADP'\n elif pos.startswith('V'):\n return 'VERB'\n elif pos.startswith('SENT') or pos.startswith('PUNC'):\n return 'PUNCT'\n else:\n return 'X'\n\n\ndef make_dep_tree(respDict, idx):\n if idx == 0:\n el = None\n else:\n el = respDict[idx]\n children = [(k, respDict[k]) for k in respDict if int(respDict[k][6]) ==\n idx]\n childTrees = [make_dep_tree(respDict, k) for k, c in children]\n return el, childTrees\n\n\ndef get_dep_tree(sentence):\n url = 'http://deptree.jental.name/parse?' 
+ urllib.parse.urlencode({\n 'text': sentence})\n respRaw = urllib.request.urlopen(url)\n resp = respRaw.read()\n respStr = resp.decode('utf-8')\n respList = [r[1:-1].split('\\\\t') for r in respStr[1:-1].split(',')]\n respDict = dict([(int(r[0]), r + [convert_pos_MSD_to_Universal(r[5])]) for\n r in respList])\n root, trees = make_dep_tree(respDict, 0)\n if len(trees) == 0:\n print('No tree', sentence, trees)\n return None\n else:\n return trees[0]\n\n\ndef filter_dep_tree(tree):\n root, children = tree\n posp = convert_pos_MSD_to_Universal(root[3])\n if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or\n posp == 'ADV' or posp == 'VERB'):\n res = [(root, list(itertools.chain.from_iterable([filter_dep_tree(c\n ) for c in children])))]\n else:\n cd = [filter_dep_tree(c) for c in children]\n if len(cd) > 0:\n res = list(itertools.chain.from_iterable(cd))\n else:\n res = []\n return res\n\n\ndef print_dep_tree(tree):\n\n def pdt(t, offset):\n root, children = t\n print(''.join([' ' for i in range(0, offset)]), root[1], root[3])\n for c in children:\n pdt(c, offset + 1)\n pdt(tree, 0)\n",
"<import token>\n\n\ndef convert_pos_MSD_to_Universal(pos):\n if pos.startswith('A'):\n return 'ADJ'\n elif pos.startswith('C'):\n return 'CCONJ'\n elif pos.startswith('I'):\n return 'INTJ'\n elif pos.startswith('M'):\n return 'NUM'\n elif pos.startswith('Nc'):\n return 'NOUN'\n elif pos.startswith('Np'):\n return 'PROPN'\n elif pos.startswith('N'):\n return 'NOUN'\n elif pos.startswith('P'):\n return 'PRON'\n elif pos.startswith('Q'):\n return 'PART'\n elif pos.startswith('R'):\n return 'ADV'\n elif pos.startswith('S'):\n return 'ADP'\n elif pos.startswith('V'):\n return 'VERB'\n elif pos.startswith('SENT') or pos.startswith('PUNC'):\n return 'PUNCT'\n else:\n return 'X'\n\n\ndef make_dep_tree(respDict, idx):\n if idx == 0:\n el = None\n else:\n el = respDict[idx]\n children = [(k, respDict[k]) for k in respDict if int(respDict[k][6]) ==\n idx]\n childTrees = [make_dep_tree(respDict, k) for k, c in children]\n return el, childTrees\n\n\n<function token>\n\n\ndef filter_dep_tree(tree):\n root, children = tree\n posp = convert_pos_MSD_to_Universal(root[3])\n if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or\n posp == 'ADV' or posp == 'VERB'):\n res = [(root, list(itertools.chain.from_iterable([filter_dep_tree(c\n ) for c in children])))]\n else:\n cd = [filter_dep_tree(c) for c in children]\n if len(cd) > 0:\n res = list(itertools.chain.from_iterable(cd))\n else:\n res = []\n return res\n\n\ndef print_dep_tree(tree):\n\n def pdt(t, offset):\n root, children = t\n print(''.join([' ' for i in range(0, offset)]), root[1], root[3])\n for c in children:\n pdt(c, offset + 1)\n pdt(tree, 0)\n",
"<import token>\n\n\ndef convert_pos_MSD_to_Universal(pos):\n if pos.startswith('A'):\n return 'ADJ'\n elif pos.startswith('C'):\n return 'CCONJ'\n elif pos.startswith('I'):\n return 'INTJ'\n elif pos.startswith('M'):\n return 'NUM'\n elif pos.startswith('Nc'):\n return 'NOUN'\n elif pos.startswith('Np'):\n return 'PROPN'\n elif pos.startswith('N'):\n return 'NOUN'\n elif pos.startswith('P'):\n return 'PRON'\n elif pos.startswith('Q'):\n return 'PART'\n elif pos.startswith('R'):\n return 'ADV'\n elif pos.startswith('S'):\n return 'ADP'\n elif pos.startswith('V'):\n return 'VERB'\n elif pos.startswith('SENT') or pos.startswith('PUNC'):\n return 'PUNCT'\n else:\n return 'X'\n\n\n<function token>\n<function token>\n\n\ndef filter_dep_tree(tree):\n root, children = tree\n posp = convert_pos_MSD_to_Universal(root[3])\n if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or\n posp == 'ADV' or posp == 'VERB'):\n res = [(root, list(itertools.chain.from_iterable([filter_dep_tree(c\n ) for c in children])))]\n else:\n cd = [filter_dep_tree(c) for c in children]\n if len(cd) > 0:\n res = list(itertools.chain.from_iterable(cd))\n else:\n res = []\n return res\n\n\ndef print_dep_tree(tree):\n\n def pdt(t, offset):\n root, children = t\n print(''.join([' ' for i in range(0, offset)]), root[1], root[3])\n for c in children:\n pdt(c, offset + 1)\n pdt(tree, 0)\n",
"<import token>\n\n\ndef convert_pos_MSD_to_Universal(pos):\n if pos.startswith('A'):\n return 'ADJ'\n elif pos.startswith('C'):\n return 'CCONJ'\n elif pos.startswith('I'):\n return 'INTJ'\n elif pos.startswith('M'):\n return 'NUM'\n elif pos.startswith('Nc'):\n return 'NOUN'\n elif pos.startswith('Np'):\n return 'PROPN'\n elif pos.startswith('N'):\n return 'NOUN'\n elif pos.startswith('P'):\n return 'PRON'\n elif pos.startswith('Q'):\n return 'PART'\n elif pos.startswith('R'):\n return 'ADV'\n elif pos.startswith('S'):\n return 'ADP'\n elif pos.startswith('V'):\n return 'VERB'\n elif pos.startswith('SENT') or pos.startswith('PUNC'):\n return 'PUNCT'\n else:\n return 'X'\n\n\n<function token>\n<function token>\n\n\ndef filter_dep_tree(tree):\n root, children = tree\n posp = convert_pos_MSD_to_Universal(root[3])\n if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or\n posp == 'ADV' or posp == 'VERB'):\n res = [(root, list(itertools.chain.from_iterable([filter_dep_tree(c\n ) for c in children])))]\n else:\n cd = [filter_dep_tree(c) for c in children]\n if len(cd) > 0:\n res = list(itertools.chain.from_iterable(cd))\n else:\n res = []\n return res\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\ndef filter_dep_tree(tree):\n root, children = tree\n posp = convert_pos_MSD_to_Universal(root[3])\n if (posp == 'ADJ' or posp == 'NUM' or posp == 'NOUN' or posp == 'PROPN' or\n posp == 'ADV' or posp == 'VERB'):\n res = [(root, list(itertools.chain.from_iterable([filter_dep_tree(c\n ) for c in children])))]\n else:\n cd = [filter_dep_tree(c) for c in children]\n if len(cd) > 0:\n res = list(itertools.chain.from_iterable(cd))\n else:\n res = []\n return res\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
9,723 |
fb82724aab7e0819c9921d41dcb612b304b25753
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# 차트에 한글 가능하도록
from matplotlib import font_manager, rc, rcParams
font_name = font_manager.FontProperties(
fname="c:/windows/Fonts/malgun.ttf").get_name()
rc('font',family=font_name)
rcParams['axes.unicode_minus'] = False # 부호표시 (-,+) 사용할때
###
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# 100행 3열 랜덤생성 2019,1,1 부터 100일
df1 = pd.DataFrame(np.random.randn(100, 3), index=pd.date_range('1/1/2019', periods=100),
columns=['A','B','C']).cumsum() # 값을 누적 시켜 넣는다.
print(df1)
# pandas 의 DataFrame 에서 내부적으로 matplotlib 를 import 해서 연결되어 있기때문에 plot 함수를 사용해서 그려준다.
df1.plot()
plt.show()
|
[
"import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n# 차트에 한글 가능하도록\nfrom matplotlib import font_manager, rc, rcParams\nfont_name = font_manager.FontProperties(\n fname=\"c:/windows/Fonts/malgun.ttf\").get_name()\nrc('font',family=font_name)\nrcParams['axes.unicode_minus'] = False # 부호표시 (-,+) 사용할때\n###\n#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n\n# 100행 3열 랜덤생성 2019,1,1 부터 100일\ndf1 = pd.DataFrame(np.random.randn(100, 3), index=pd.date_range('1/1/2019', periods=100),\n columns=['A','B','C']).cumsum() # 값을 누적 시켜 넣는다.\n\nprint(df1)\n\n# pandas 의 DataFrame 에서 내부적으로 matplotlib 를 import 해서 연결되어 있기때문에 plot 함수를 사용해서 그려준다.\ndf1.plot()\nplt.show()\n\n",
"import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib import font_manager, rc, rcParams\nfont_name = font_manager.FontProperties(fname='c:/windows/Fonts/malgun.ttf'\n ).get_name()\nrc('font', family=font_name)\nrcParams['axes.unicode_minus'] = False\ndf1 = pd.DataFrame(np.random.randn(100, 3), index=pd.date_range('1/1/2019',\n periods=100), columns=['A', 'B', 'C']).cumsum()\nprint(df1)\ndf1.plot()\nplt.show()\n",
"<import token>\nfont_name = font_manager.FontProperties(fname='c:/windows/Fonts/malgun.ttf'\n ).get_name()\nrc('font', family=font_name)\nrcParams['axes.unicode_minus'] = False\ndf1 = pd.DataFrame(np.random.randn(100, 3), index=pd.date_range('1/1/2019',\n periods=100), columns=['A', 'B', 'C']).cumsum()\nprint(df1)\ndf1.plot()\nplt.show()\n",
"<import token>\n<assignment token>\nrc('font', family=font_name)\n<assignment token>\nprint(df1)\ndf1.plot()\nplt.show()\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,724 |
4ea266d4f4c18efbba4204d7301652f8966c18a5
|
# -*- coding: utf-8 -*-
"""
Animation practical output
The code that follows builds on the "Communications.py" file
Additional code that follows has in part been modified from that of
https://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/index.html
https://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel.py
https://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel2.py
"""
import random
import operator
import matplotlib.pyplot
import matplotlib.animation
import agentframeworkanimate
import csv
# Reading the in.txt file to create the environment.
with open("in.txt", newline="") as raster:
dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)
environment = []
for row in dataset:
rowlist = []
for value in row:
rowlist.append(value)
environment.append(rowlist)
# Setting initial parameters.
num_of_agents = 10
num_of_iterations = 100
neighbourhood = 20
agents = []
# Variables to animate the model.
fig = matplotlib.pyplot.figure(figsize=(7, 7))
ax = fig.add_axes([0, 0, 1, 1])
ax.set_autoscale_on(False)
# Make the agents.
# Addition of environment as argument for Agent class to allow interaction between agents and environment.
# Addition of agents as argument for Agent class to allow agents to interact with each other.
for i in range(num_of_agents):
agents.append(agentframeworkanimate.Agent(environment, agents))
carry_on = True
# Creating model animation.
def update(frame_number):
fig.clear()
global carry_on
# Move the agents and store what they eat
for j in range(num_of_iterations):
# Shuffle function used to randomise the order agents are processed with each iteration.
random.shuffle(agents)
for i in range(num_of_agents):
agents[i].move()
agents[i].eat()
agents[i].share_with_neighbours(neighbourhood)
# Stopping condition for animation when all agents have 100 in their store.
if agents[i].store == 100:
carry_on = False
print("Stopping condition met")
# Generate scatterplot of agents after model iterations.
matplotlib.pyplot.xlim(0, 99)
matplotlib.pyplot.ylim(0, 99)
matplotlib.pyplot.imshow(environment)
for i in range(num_of_agents):
matplotlib.pyplot.scatter(agents[i].x,agents[i].y)
# Generator function to stop animation.
# Will stop animation after 10 iterations unless carry_on variable is set to False.
def gen_function(b = [0]):
a = 0
global carry_on
while (a < 100) & (carry_on):
yield a
a = a + 1
# Animation will run until generator function condition is met
#animation = matplotlib.animation.FuncAnimation(fig, update, interval=1, repeat=False, frames=10)
animation = matplotlib.animation.FuncAnimation(fig, update, frames=gen_function, repeat=False)
matplotlib.pyplot.show()
# Writing the final environment to a text file.
with open("out.txt", "w", newline="") as finalenviron:
writer = csv.writer(finalenviron, delimiter=",")
for row in environment:
writer.writerow(row)
|
[
"# -*- coding: utf-8 -*-\n\"\"\"\nAnimation practical output\n\nThe code that follows builds on the \"Communications.py\" file\n\nAdditional code that follows has in part been modified from that of\nhttps://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/index.html\nhttps://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel.py\nhttps://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel2.py\n\"\"\"\n\nimport random\nimport operator\nimport matplotlib.pyplot\nimport matplotlib.animation\nimport agentframeworkanimate\nimport csv\n\n\n# Reading the in.txt file to create the environment.\nwith open(\"in.txt\", newline=\"\") as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\n \n# Setting initial parameters.\nnum_of_agents = 10\nnum_of_iterations = 100\nneighbourhood = 20\nagents = []\n\n# Variables to animate the model.\nfig = matplotlib.pyplot.figure(figsize=(7, 7))\nax = fig.add_axes([0, 0, 1, 1])\n\nax.set_autoscale_on(False)\n\n# Make the agents.\n# Addition of environment as argument for Agent class to allow interaction between agents and environment.\n# Addition of agents as argument for Agent class to allow agents to interact with each other.\nfor i in range(num_of_agents):\n agents.append(agentframeworkanimate.Agent(environment, agents))\n\n\ncarry_on = True\n\n# Creating model animation.\ndef update(frame_number):\n fig.clear()\n global carry_on \n\n# Move the agents and store what they eat\n for j in range(num_of_iterations):\n # Shuffle function used to randomise the order agents are processed with each iteration.\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n \n # Stopping condition 
for animation when all agents have 100 in their store.\n if agents[i].store == 100:\n carry_on = False\n print(\"Stopping condition met\")\n\n # Generate scatterplot of agents after model iterations.\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment) \n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x,agents[i].y)\n \n# Generator function to stop animation.\n# Will stop animation after 10 iterations unless carry_on variable is set to False.\ndef gen_function(b = [0]):\n a = 0\n global carry_on\n while (a < 100) & (carry_on):\n yield a\n a = a + 1 \n\n# Animation will run until generator function condition is met\n#animation = matplotlib.animation.FuncAnimation(fig, update, interval=1, repeat=False, frames=10)\nanimation = matplotlib.animation.FuncAnimation(fig, update, frames=gen_function, repeat=False)\n\nmatplotlib.pyplot.show()\n\n \n# Writing the final environment to a text file.\nwith open(\"out.txt\", \"w\", newline=\"\") as finalenviron:\n writer = csv.writer(finalenviron, delimiter=\",\")\n for row in environment:\n writer.writerow(row)\n",
"<docstring token>\nimport random\nimport operator\nimport matplotlib.pyplot\nimport matplotlib.animation\nimport agentframeworkanimate\nimport csv\nwith open('in.txt', newline='') as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\nnum_of_agents = 10\nnum_of_iterations = 100\nneighbourhood = 20\nagents = []\nfig = matplotlib.pyplot.figure(figsize=(7, 7))\nax = fig.add_axes([0, 0, 1, 1])\nax.set_autoscale_on(False)\nfor i in range(num_of_agents):\n agents.append(agentframeworkanimate.Agent(environment, agents))\ncarry_on = True\n\n\ndef update(frame_number):\n fig.clear()\n global carry_on\n for j in range(num_of_iterations):\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n if agents[i].store == 100:\n carry_on = False\n print('Stopping condition met')\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment)\n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x, agents[i].y)\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\nanimation = matplotlib.animation.FuncAnimation(fig, update, frames=\n gen_function, repeat=False)\nmatplotlib.pyplot.show()\nwith open('out.txt', 'w', newline='') as finalenviron:\n writer = csv.writer(finalenviron, delimiter=',')\n for row in environment:\n writer.writerow(row)\n",
"<docstring token>\n<import token>\nwith open('in.txt', newline='') as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\nnum_of_agents = 10\nnum_of_iterations = 100\nneighbourhood = 20\nagents = []\nfig = matplotlib.pyplot.figure(figsize=(7, 7))\nax = fig.add_axes([0, 0, 1, 1])\nax.set_autoscale_on(False)\nfor i in range(num_of_agents):\n agents.append(agentframeworkanimate.Agent(environment, agents))\ncarry_on = True\n\n\ndef update(frame_number):\n fig.clear()\n global carry_on\n for j in range(num_of_iterations):\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n if agents[i].store == 100:\n carry_on = False\n print('Stopping condition met')\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment)\n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x, agents[i].y)\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\nanimation = matplotlib.animation.FuncAnimation(fig, update, frames=\n gen_function, repeat=False)\nmatplotlib.pyplot.show()\nwith open('out.txt', 'w', newline='') as finalenviron:\n writer = csv.writer(finalenviron, delimiter=',')\n for row in environment:\n writer.writerow(row)\n",
"<docstring token>\n<import token>\nwith open('in.txt', newline='') as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\n<assignment token>\nax.set_autoscale_on(False)\nfor i in range(num_of_agents):\n agents.append(agentframeworkanimate.Agent(environment, agents))\n<assignment token>\n\n\ndef update(frame_number):\n fig.clear()\n global carry_on\n for j in range(num_of_iterations):\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n if agents[i].store == 100:\n carry_on = False\n print('Stopping condition met')\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment)\n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x, agents[i].y)\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\n<assignment token>\nmatplotlib.pyplot.show()\nwith open('out.txt', 'w', newline='') as finalenviron:\n writer = csv.writer(finalenviron, delimiter=',')\n for row in environment:\n writer.writerow(row)\n",
"<docstring token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n\n\ndef update(frame_number):\n fig.clear()\n global carry_on\n for j in range(num_of_iterations):\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n if agents[i].store == 100:\n carry_on = False\n print('Stopping condition met')\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment)\n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x, agents[i].y)\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\n<assignment token>\n<code token>\n",
"<docstring token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\n<assignment token>\n<code token>\n",
"<docstring token>\n<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
9,725 |
c2b6e51622681ac916e860ed4ff5715808dff102
|
import numpy as np
import matplotlib as plt
import math
from DoublePendulum import DP #imports useful modules and double pendulum class from DoublePendulum.py
import json
import pandas as pd
import copy
from pathlib import Path
#accessing config file
with open('config.json') as config_file:
initdata = json.load(config_file)
#retrieving variables from config file
initMA = initdata['Mass A']
initMB = initdata['Mass B']
initLA = initdata['Length A']
initLB = initdata['Length B']
initAA = initdata['Angle A']
initAB = initdata['Angle B']
method = initdata['Method']
timeStep = initdata['Time Step']
nCycles = initdata['Number of Cycles']
# Setting Initial Conditions based on the config file
pend = DP(initMA,initMB,initLA,initLB,math.radians(initAA),math.radians(initAB),[0,0],[0,0],[0,0],[0,0],0,0,1,1,1,1,1,1,1)
pend.updCartesian()
pend.updEnergies()
data = []
time = 0
x1 = 0
x2 = 0
y1 = 0
y2 = 0
if method == 1:
for n in range(nCycles):
#print(n)
time += timeStep
pend.updEuler(timeStep)
pend.updCartesian()
pend.updEnergies()
pend.updMomentum()
x1 = pend.xy1[0]
x2 = pend.xy2[0]
y1 = pend.xy1[1]
y2 = pend.xy2[1]
p11 = pend.p1[0]
p12 = pend.p1[1]
p21 = pend.p2[0]
p22 = pend.p2[1]
print(p22)
item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12), copy.deepcopy(p21), copy.deepcopy(p22)]
data.append(item)
elif method == 2:
for n in range(nCycles):
print(n)
time += timeStep
pend.updEulerCromer(timeStep)
pend.updCartesian()
pend.updEnergies()
pend.updMomentum()
x1 = pend.xy1[0]
x2 = pend.xy2[0]
y1 = pend.xy1[1]
y2 = pend.xy2[1]
p11 = pend.p1[0]
p12 = pend.p1[1]
p21 = pend.p2[0]
p22 = pend.p2[1]
item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]
data.append(item)
elif method == 3:
for n in range(nCycles):
print(n)
time += timeStep
pend.updRungeKutta(timeStep)
pend.updCartesian()
pend.updEnergies()
pend.updMomentum()
x1 = pend.xy1[0]
x2 = pend.xy2[0]
y1 = pend.xy1[1]
y2 = pend.xy2[1]
p11 = pend.p1[0]
p12 = pend.p1[1]
p21 = pend.p2[0]
p22 = pend.p2[1]
item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]
data.append(item)
else:
print('invalid method selection, update config file')
exit()
np.save(Path.cwd()/'datafile', data, allow_pickle=True)
print('data file saved')
|
[
"import numpy as np \r\nimport matplotlib as plt\r\nimport math\r\nfrom DoublePendulum import DP #imports useful modules and double pendulum class from DoublePendulum.py\r\nimport json\r\nimport pandas as pd\r\nimport copy\r\nfrom pathlib import Path\r\n\r\n#accessing config file\r\nwith open('config.json') as config_file:\r\n initdata = json.load(config_file)\r\n\r\n#retrieving variables from config file\r\ninitMA = initdata['Mass A']\r\ninitMB = initdata['Mass B']\r\ninitLA = initdata['Length A']\r\ninitLB = initdata['Length B']\r\ninitAA = initdata['Angle A']\r\ninitAB = initdata['Angle B']\r\nmethod = initdata['Method']\r\ntimeStep = initdata['Time Step']\r\nnCycles = initdata['Number of Cycles']\r\n\r\n# Setting Initial Conditions based on the config file\r\npend = DP(initMA,initMB,initLA,initLB,math.radians(initAA),math.radians(initAB),[0,0],[0,0],[0,0],[0,0],0,0,1,1,1,1,1,1,1)\r\npend.updCartesian()\r\npend.updEnergies()\r\ndata = []\r\ntime = 0\r\nx1 = 0\r\nx2 = 0\r\ny1 = 0\r\ny2 = 0\r\n\r\nif method == 1:\r\n for n in range(nCycles):\r\n #print(n)\r\n time += timeStep\r\n pend.updEuler(timeStep)\r\n pend.updCartesian()\r\n pend.updEnergies()\r\n pend.updMomentum()\r\n x1 = pend.xy1[0]\r\n x2 = pend.xy2[0]\r\n y1 = pend.xy1[1]\r\n y2 = pend.xy2[1]\r\n p11 = pend.p1[0]\r\n p12 = pend.p1[1]\r\n p21 = pend.p2[0]\r\n p22 = pend.p2[1]\r\n print(p22)\r\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12), copy.deepcopy(p21), copy.deepcopy(p22)]\r\n data.append(item)\r\nelif method == 2:\r\n for n in range(nCycles):\r\n print(n)\r\n time += timeStep\r\n pend.updEulerCromer(timeStep)\r\n pend.updCartesian()\r\n pend.updEnergies()\r\n pend.updMomentum()\r\n x1 = pend.xy1[0]\r\n x2 = pend.xy2[0]\r\n y1 = pend.xy1[1]\r\n y2 = pend.xy2[1]\r\n p11 = pend.p1[0]\r\n p12 
= pend.p1[1]\r\n p21 = pend.p2[0]\r\n p22 = pend.p2[1]\r\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]\r\n data.append(item)\r\nelif method == 3:\r\n for n in range(nCycles):\r\n print(n)\r\n time += timeStep\r\n pend.updRungeKutta(timeStep)\r\n pend.updCartesian()\r\n pend.updEnergies()\r\n pend.updMomentum()\r\n x1 = pend.xy1[0]\r\n x2 = pend.xy2[0]\r\n y1 = pend.xy1[1]\r\n y2 = pend.xy2[1]\r\n p11 = pend.p1[0]\r\n p12 = pend.p1[1]\r\n p21 = pend.p2[0]\r\n p22 = pend.p2[1]\r\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1), copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy(pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy(y1), copy.deepcopy(y2),copy.deepcopy(p11),copy.deepcopy(p12),copy.deepcopy(p21),copy.deepcopy(p22)]\r\n data.append(item)\r\nelse:\r\n print('invalid method selection, update config file')\r\n exit()\r\n\r\nnp.save(Path.cwd()/'datafile', data, allow_pickle=True)\r\nprint('data file saved')\r\n\r\n\r\n\r\n",
"import numpy as np\nimport matplotlib as plt\nimport math\nfrom DoublePendulum import DP\nimport json\nimport pandas as pd\nimport copy\nfrom pathlib import Path\nwith open('config.json') as config_file:\n initdata = json.load(config_file)\ninitMA = initdata['Mass A']\ninitMB = initdata['Mass B']\ninitLA = initdata['Length A']\ninitLB = initdata['Length B']\ninitAA = initdata['Angle A']\ninitAB = initdata['Angle B']\nmethod = initdata['Method']\ntimeStep = initdata['Time Step']\nnCycles = initdata['Number of Cycles']\npend = DP(initMA, initMB, initLA, initLB, math.radians(initAA), math.\n radians(initAB), [0, 0], [0, 0], [0, 0], [0, 0], 0, 0, 1, 1, 1, 1, 1, 1, 1)\npend.updCartesian()\npend.updEnergies()\ndata = []\ntime = 0\nx1 = 0\nx2 = 0\ny1 = 0\ny2 = 0\nif method == 1:\n for n in range(nCycles):\n time += timeStep\n pend.updEuler(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n print(p22)\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 2:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updEulerCromer(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), 
copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 3:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updRungeKutta(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelse:\n print('invalid method selection, update config file')\n exit()\nnp.save(Path.cwd() / 'datafile', data, allow_pickle=True)\nprint('data file saved')\n",
"<import token>\nwith open('config.json') as config_file:\n initdata = json.load(config_file)\ninitMA = initdata['Mass A']\ninitMB = initdata['Mass B']\ninitLA = initdata['Length A']\ninitLB = initdata['Length B']\ninitAA = initdata['Angle A']\ninitAB = initdata['Angle B']\nmethod = initdata['Method']\ntimeStep = initdata['Time Step']\nnCycles = initdata['Number of Cycles']\npend = DP(initMA, initMB, initLA, initLB, math.radians(initAA), math.\n radians(initAB), [0, 0], [0, 0], [0, 0], [0, 0], 0, 0, 1, 1, 1, 1, 1, 1, 1)\npend.updCartesian()\npend.updEnergies()\ndata = []\ntime = 0\nx1 = 0\nx2 = 0\ny1 = 0\ny2 = 0\nif method == 1:\n for n in range(nCycles):\n time += timeStep\n pend.updEuler(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n print(p22)\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 2:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updEulerCromer(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 3:\n for n in range(nCycles):\n print(n)\n time += timeStep\n 
pend.updRungeKutta(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelse:\n print('invalid method selection, update config file')\n exit()\nnp.save(Path.cwd() / 'datafile', data, allow_pickle=True)\nprint('data file saved')\n",
"<import token>\nwith open('config.json') as config_file:\n initdata = json.load(config_file)\n<assignment token>\npend.updCartesian()\npend.updEnergies()\n<assignment token>\nif method == 1:\n for n in range(nCycles):\n time += timeStep\n pend.updEuler(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n print(p22)\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 2:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updEulerCromer(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelif method == 3:\n for n in range(nCycles):\n print(n)\n time += timeStep\n pend.updRungeKutta(timeStep)\n pend.updCartesian()\n pend.updEnergies()\n pend.updMomentum()\n x1 = pend.xy1[0]\n x2 = pend.xy2[0]\n y1 = pend.xy1[1]\n y2 = pend.xy2[1]\n p11 = pend.p1[0]\n p12 = pend.p1[1]\n p21 = pend.p2[0]\n p22 = pend.p2[1]\n item = [time, copy.deepcopy(pend.totalE), copy.deepcopy(pend.KE1),\n copy.deepcopy(pend.KE2), copy.deepcopy(pend.PE1), copy.deepcopy\n (pend.PE2), copy.deepcopy(x1), copy.deepcopy(x2), 
copy.deepcopy\n (y1), copy.deepcopy(y2), copy.deepcopy(p11), copy.deepcopy(p12),\n copy.deepcopy(p21), copy.deepcopy(p22)]\n data.append(item)\nelse:\n print('invalid method selection, update config file')\n exit()\nnp.save(Path.cwd() / 'datafile', data, allow_pickle=True)\nprint('data file saved')\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,726 |
e686d8617360c5a3ce35bd4d2bdeb2376b33f53a
|
#!/usr/bin/env python
import re
pdfs_file = './pdf_names_2017.txt'
sessions_file = './session_names_2017.txt'
with open(pdfs_file) as f:
pdf_names = f.read().splitlines()
with open(sessions_file) as f:
session_names = f.read().splitlines()
#for i in xrange(0,len(pdf_names)):
# print str(i+1).zfill(3) + '_-_' + pdf_names[i][:-4] + '_-_' + session_names[i] + pdf_names[i][-4:]
card_pre = """
<section class="section--center mdl-grid mdl-grid--no-spacing mdl-shadow--2dp">
<header class="section__play-btn mdl-cell mdl-cell--3-col-desktop mdl-cell--2-col-tablet mdl-cell--4-col-phone mdl-color--teal-100 mdl-color-text--white">
<i class="material-icons">record_voice_over</i>
</header>
<div class="mdl-card mdl-cell mdl-cell--9-col-desktop mdl-cell--6-col-tablet mdl-cell--4-col-phone">
<div class="mdl-card__supporting-text">
"""
card_content = """
<h4>Incidental_Findings_-_Introduction_and_Overview</h4>
Monday_0700_LBerland
"""
card_post_1 = """
</div>
<div class="mdl-card__actions">
<a href="pdf/"""
card_post_2 = """" target="_blank" class="mdl-button">Handout</a>
</div>
</div>
</section>
"""
"""
<section class="section--center mdl-grid mdl-grid--no-spacing mdl-shadow--2dp">
<header class="section__play-btn mdl-cell mdl-cell--3-col-desktop mdl-cell--2-col-tablet mdl-cell--4-col-phone mdl-color--teal-100 mdl-color-text--white">
<i class="material-icons">record_voice_over</i>
</header>
<div class="mdl-card mdl-cell mdl-cell--9-col-desktop mdl-cell--6-col-tablet mdl-cell--4-col-phone">
<div class="mdl-card__supporting-text">
<h4>Incidental_Findings_-_Introduction_and_Overview</h4>
Monday_0700_LBerland
</div>
<div class="mdl-card__actions">
<a href="#" class="mdl-button">Handout</a>
</div>
</div>
</section>
"""
for i in xrange(0,len(pdf_names)):
print card_pre + "<h4>" + session_names[i] + "</h4>" + pdf_names[i][:-4].replace("_"," ") + card_post_1 + pdf_names[i] + card_post_2
|
[
"#!/usr/bin/env python\n\nimport re\n\n\npdfs_file = './pdf_names_2017.txt'\nsessions_file = './session_names_2017.txt'\n\nwith open(pdfs_file) as f:\n pdf_names = f.read().splitlines()\n\nwith open(sessions_file) as f:\n session_names = f.read().splitlines()\n\n#for i in xrange(0,len(pdf_names)):\n# print str(i+1).zfill(3) + '_-_' + pdf_names[i][:-4] + '_-_' + session_names[i] + pdf_names[i][-4:]\n\n\ncard_pre = \"\"\"\n<section class=\"section--center mdl-grid mdl-grid--no-spacing mdl-shadow--2dp\">\n <header class=\"section__play-btn mdl-cell mdl-cell--3-col-desktop mdl-cell--2-col-tablet mdl-cell--4-col-phone mdl-color--teal-100 mdl-color-text--white\">\n <i class=\"material-icons\">record_voice_over</i>\n </header>\n <div class=\"mdl-card mdl-cell mdl-cell--9-col-desktop mdl-cell--6-col-tablet mdl-cell--4-col-phone\">\n <div class=\"mdl-card__supporting-text\">\n\"\"\"\n\ncard_content = \"\"\" \n<h4>Incidental_Findings_-_Introduction_and_Overview</h4>\n Monday_0700_LBerland\n\"\"\"\n\ncard_post_1 = \"\"\"\n </div>\n <div class=\"mdl-card__actions\">\n <a href=\"pdf/\"\"\"\n\n\ncard_post_2 = \"\"\"\" target=\"_blank\" class=\"mdl-button\">Handout</a>\n </div>\n </div>\n</section>\n\"\"\"\n\n\"\"\"\n<section class=\"section--center mdl-grid mdl-grid--no-spacing mdl-shadow--2dp\">\n <header class=\"section__play-btn mdl-cell mdl-cell--3-col-desktop mdl-cell--2-col-tablet mdl-cell--4-col-phone mdl-color--teal-100 mdl-color-text--white\">\n <i class=\"material-icons\">record_voice_over</i>\n </header>\n <div class=\"mdl-card mdl-cell mdl-cell--9-col-desktop mdl-cell--6-col-tablet mdl-cell--4-col-phone\">\n <div class=\"mdl-card__supporting-text\">\n <h4>Incidental_Findings_-_Introduction_and_Overview</h4>\n Monday_0700_LBerland\n </div>\n <div class=\"mdl-card__actions\">\n <a href=\"#\" class=\"mdl-button\">Handout</a>\n </div>\n </div>\n</section>\n\"\"\"\n\nfor i in xrange(0,len(pdf_names)):\n print card_pre + \"<h4>\" + session_names[i] + \"</h4>\" + 
pdf_names[i][:-4].replace(\"_\",\" \") + card_post_1 + pdf_names[i] + card_post_2\n\n"
] | true |
9,727 |
5cb390b06026bc0899c0b10dc93f3ec1f2ffefa6
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = "Sponge_sy"
# Date: 2021/9/11
import numpy
from tqdm import tqdm
from bert4keras.tokenizers import Tokenizer
from bert4keras.models import build_transformer_model
from bert4keras.snippets import sequence_padding, DataGenerator
from utils import *
class data_generator(DataGenerator):
"""Data Generator"""
def __init__(self, pattern="", is_pre=True, *args, **kwargs):
super(data_generator, self).__init__(*args, **kwargs)
self.pattern = pattern
self.is_pre = is_pre
def __iter__(self, random=False):
batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []
for is_end, text in self.sample(random):
if (self.is_pre):
token_ids, segment_ids = tokenizer.encode(first_text=self.pattern, second_text=text, maxlen=maxlen)
else:
token_ids, segment_ids = tokenizer.encode(first_text=text, second_text=self.pattern, maxlen=maxlen)
source_ids, target_ids = token_ids[:], token_ids[:]
batch_token_ids.append(source_ids)
batch_segment_ids.append(segment_ids)
if len(batch_token_ids) == self.batch_size or is_end:
batch_token_ids = sequence_padding(batch_token_ids)
batch_segment_ids = sequence_padding(batch_segment_ids)
yield [batch_token_ids, batch_segment_ids], None
batch_token_ids, batch_segment_ids, = [], []
def predict(data_generator_list, data):
print("\n*******************Start to Zero-Shot predict*******************", flush=True)
patterns_logits = [[] for _ in patterns]
samples_logits = [[] for _ in data]
for i in range(len(data_generator_list)):
print("\nPattern{}".format(i), flush=True)
data_generator = data_generator_list[i]
counter = 0
for (x, _) in tqdm(data_generator):
outputs = model.predict(x[:2])
for out in outputs:
logit_pos = out[0].T
patterns_logits[i].append(logit_pos)
samples_logits[counter].append(logit_pos)
counter += 1
preds = []
for i in range(len(patterns_logits[0])):
pred = numpy.argmax([logits[i] for logits in patterns_logits])
preds.append(int(pred))
return preds, samples_logits
if __name__ == "__main__":
# Load the hyper-parameters-----------------------------------------------------------
maxlen = 128 # The max length 128 is used in our paper
batch_size = 40 # Will not influence the results
# Choose a model----------------------------------------------------------------------
# Recommend to use 'uer-mixed-bert-base'
# model_names = ['google-bert', 'google-bert-small', 'google-bert-zh',
# 'hfl-bert-wwm', 'hfl-bert-wwm-ext',
# 'uer-mixed-bert-tiny', 'uer-mixed-bert-small',
# 'uer-mixed-bert-base', 'uer-mixed-bert-large']
model_name = 'uer-mixed-bert-base'
# Choose a dataset----------------------------------------------------------------------
# dataset_names = ['eprstmt', 'tnews', 'csldcp', 'iflytek']
# dataset_name = 'eprstmt'
# Load model and dataset class
bert_model = Model(model_name=model_name)
# Create a template --------------------------------------------------------------------
label_names = ['entertainment', 'sports', 'music', 'games', 'economics', 'education']
patterns = ["This is {} news".format(label) for label in label_names]
# Prefix or Suffix-------------------------------------------------------------------
is_pre = True
# Load the demo set--------------------------------------------------------------------
demo_data_en = ['FIFA unveils biennial World Cup plan, UEFA threatens boycott',
'COVID vaccines hold up against severe Delta: US data',
'Justin Drew Bieber was born on March 1, 1994 at St. ',
'Horizon launches latest chip to take on global rivals',
'Twitch video gamers rise up to stop ‘hate raids’']
demo_data = demo_data_en
demo_generator_list = []
for p in patterns:
demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre, data=demo_data, batch_size=batch_size))
# Build BERT model---------------------------------------------------------------------
tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)
# Load BERET model with NSP head
model = build_transformer_model(
config_path='.' + bert_model.config_path, checkpoint_path='.' + bert_model.checkpoint_path, with_nsp=True,
)
# Zero-Shot predict and evaluate-------------------------------------------------------
preds, samples_logits = predict(demo_generator_list, demo_data)
for i, (p, d) in enumerate(zip(preds, demo_data)):
pred_label = label_names[p]
print("Sample {}:".format(i))
print("Original Text: {}".format(d))
print("Predict label: {}".format(pred_label))
print("Logits: {}".format(samples_logits[i]))
print()
|
[
"#! /usr/bin/env python\n# -*- coding: utf-8 -*-\n# __author__ = \"Sponge_sy\"\n# Date: 2021/9/11\n\n\nimport numpy\nfrom tqdm import tqdm\nfrom bert4keras.tokenizers import Tokenizer\nfrom bert4keras.models import build_transformer_model\nfrom bert4keras.snippets import sequence_padding, DataGenerator\nfrom utils import *\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern=\"\", is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if (self.is_pre):\n token_ids, segment_ids = tokenizer.encode(first_text=self.pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text, second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids, = [], []\n\ndef predict(data_generator_list, data):\n print(\"\\n*******************Start to Zero-Shot predict*******************\", flush=True)\n patterns_logits = [[] for _ in patterns]\n samples_logits = [[] for _ in data]\n for i in range(len(data_generator_list)):\n print(\"\\nPattern{}\".format(i), flush=True)\n data_generator = data_generator_list[i]\n counter = 0\n for (x, _) in tqdm(data_generator):\n outputs = model.predict(x[:2])\n for out in outputs:\n logit_pos = out[0].T\n patterns_logits[i].append(logit_pos)\n samples_logits[counter].append(logit_pos)\n counter += 1\n preds = []\n for i in range(len(patterns_logits[0])):\n pred = 
numpy.argmax([logits[i] for logits in patterns_logits])\n preds.append(int(pred))\n return preds, samples_logits\n\nif __name__ == \"__main__\":\n\n # Load the hyper-parameters-----------------------------------------------------------\n maxlen = 128 # The max length 128 is used in our paper\n batch_size = 40 # Will not influence the results\n\n # Choose a model----------------------------------------------------------------------\n # Recommend to use 'uer-mixed-bert-base'\n # model_names = ['google-bert', 'google-bert-small', 'google-bert-zh',\n # 'hfl-bert-wwm', 'hfl-bert-wwm-ext',\n # 'uer-mixed-bert-tiny', 'uer-mixed-bert-small',\n # 'uer-mixed-bert-base', 'uer-mixed-bert-large']\n model_name = 'uer-mixed-bert-base'\n\n # Choose a dataset----------------------------------------------------------------------\n # dataset_names = ['eprstmt', 'tnews', 'csldcp', 'iflytek']\n # dataset_name = 'eprstmt'\n\n # Load model and dataset class\n bert_model = Model(model_name=model_name)\n\n # Create a template --------------------------------------------------------------------\n label_names = ['entertainment', 'sports', 'music', 'games', 'economics', 'education']\n patterns = [\"This is {} news\".format(label) for label in label_names]\n\n # Prefix or Suffix-------------------------------------------------------------------\n is_pre = True\n\n # Load the demo set--------------------------------------------------------------------\n\n demo_data_en = ['FIFA unveils biennial World Cup plan, UEFA threatens boycott',\n 'COVID vaccines hold up against severe Delta: US data',\n 'Justin Drew Bieber was born on March 1, 1994 at St. 
',\n 'Horizon launches latest chip to take on global rivals',\n 'Twitch video gamers rise up to stop ‘hate raids’']\n\n demo_data = demo_data_en\n demo_generator_list = []\n for p in patterns:\n demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre, data=demo_data, batch_size=batch_size))\n\n # Build BERT model---------------------------------------------------------------------\n tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)\n # Load BERET model with NSP head\n model = build_transformer_model(\n config_path='.' + bert_model.config_path, checkpoint_path='.' + bert_model.checkpoint_path, with_nsp=True,\n )\n\n # Zero-Shot predict and evaluate-------------------------------------------------------\n preds, samples_logits = predict(demo_generator_list, demo_data)\n for i, (p, d) in enumerate(zip(preds, demo_data)):\n pred_label = label_names[p]\n print(\"Sample {}:\".format(i))\n print(\"Original Text: {}\".format(d))\n print(\"Predict label: {}\".format(pred_label))\n print(\"Logits: {}\".format(samples_logits[i]))\n print()\n",
"import numpy\nfrom tqdm import tqdm\nfrom bert4keras.tokenizers import Tokenizer\nfrom bert4keras.models import build_transformer_model\nfrom bert4keras.snippets import sequence_padding, DataGenerator\nfrom utils import *\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\ndef predict(data_generator_list, data):\n print('\\n*******************Start to Zero-Shot predict*******************',\n flush=True)\n patterns_logits = [[] for _ in patterns]\n samples_logits = [[] for _ in data]\n for i in range(len(data_generator_list)):\n print('\\nPattern{}'.format(i), flush=True)\n data_generator = data_generator_list[i]\n counter = 0\n for x, _ in tqdm(data_generator):\n outputs = model.predict(x[:2])\n for out in outputs:\n logit_pos = out[0].T\n patterns_logits[i].append(logit_pos)\n samples_logits[counter].append(logit_pos)\n counter += 1\n preds = []\n for i in range(len(patterns_logits[0])):\n pred = numpy.argmax([logits[i] for logits in patterns_logits])\n preds.append(int(pred))\n return preds, 
samples_logits\n\n\nif __name__ == '__main__':\n maxlen = 128\n batch_size = 40\n model_name = 'uer-mixed-bert-base'\n bert_model = Model(model_name=model_name)\n label_names = ['entertainment', 'sports', 'music', 'games', 'economics',\n 'education']\n patterns = ['This is {} news'.format(label) for label in label_names]\n is_pre = True\n demo_data_en = [\n 'FIFA unveils biennial World Cup plan, UEFA threatens boycott',\n 'COVID vaccines hold up against severe Delta: US data',\n 'Justin Drew Bieber was born on March 1, 1994 at St. ',\n 'Horizon launches latest chip to take on global rivals',\n 'Twitch video gamers rise up to stop ‘hate raids’']\n demo_data = demo_data_en\n demo_generator_list = []\n for p in patterns:\n demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre,\n data=demo_data, batch_size=batch_size))\n tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)\n model = build_transformer_model(config_path='.' + bert_model.\n config_path, checkpoint_path='.' + bert_model.checkpoint_path,\n with_nsp=True)\n preds, samples_logits = predict(demo_generator_list, demo_data)\n for i, (p, d) in enumerate(zip(preds, demo_data)):\n pred_label = label_names[p]\n print('Sample {}:'.format(i))\n print('Original Text: {}'.format(d))\n print('Predict label: {}'.format(pred_label))\n print('Logits: {}'.format(samples_logits[i]))\n print()\n",
"<import token>\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\ndef predict(data_generator_list, data):\n print('\\n*******************Start to Zero-Shot predict*******************',\n flush=True)\n patterns_logits = [[] for _ in patterns]\n samples_logits = [[] for _ in data]\n for i in range(len(data_generator_list)):\n print('\\nPattern{}'.format(i), flush=True)\n data_generator = data_generator_list[i]\n counter = 0\n for x, _ in tqdm(data_generator):\n outputs = model.predict(x[:2])\n for out in outputs:\n logit_pos = out[0].T\n patterns_logits[i].append(logit_pos)\n samples_logits[counter].append(logit_pos)\n counter += 1\n preds = []\n for i in range(len(patterns_logits[0])):\n pred = numpy.argmax([logits[i] for logits in patterns_logits])\n preds.append(int(pred))\n return preds, samples_logits\n\n\nif __name__ == '__main__':\n maxlen = 128\n batch_size = 40\n model_name = 'uer-mixed-bert-base'\n bert_model = Model(model_name=model_name)\n label_names = ['entertainment', 'sports', 'music', 'games', 
'economics',\n 'education']\n patterns = ['This is {} news'.format(label) for label in label_names]\n is_pre = True\n demo_data_en = [\n 'FIFA unveils biennial World Cup plan, UEFA threatens boycott',\n 'COVID vaccines hold up against severe Delta: US data',\n 'Justin Drew Bieber was born on March 1, 1994 at St. ',\n 'Horizon launches latest chip to take on global rivals',\n 'Twitch video gamers rise up to stop ‘hate raids’']\n demo_data = demo_data_en\n demo_generator_list = []\n for p in patterns:\n demo_generator_list.append(data_generator(pattern=p, is_pre=is_pre,\n data=demo_data, batch_size=batch_size))\n tokenizer = Tokenizer('.' + bert_model.dict_path, do_lower_case=True)\n model = build_transformer_model(config_path='.' + bert_model.\n config_path, checkpoint_path='.' + bert_model.checkpoint_path,\n with_nsp=True)\n preds, samples_logits = predict(demo_generator_list, demo_data)\n for i, (p, d) in enumerate(zip(preds, demo_data)):\n pred_label = label_names[p]\n print('Sample {}:'.format(i))\n print('Original Text: {}'.format(d))\n print('Predict label: {}'.format(pred_label))\n print('Logits: {}'.format(samples_logits[i]))\n print()\n",
"<import token>\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\ndef predict(data_generator_list, data):\n print('\\n*******************Start to Zero-Shot predict*******************',\n flush=True)\n patterns_logits = [[] for _ in patterns]\n samples_logits = [[] for _ in data]\n for i in range(len(data_generator_list)):\n print('\\nPattern{}'.format(i), flush=True)\n data_generator = data_generator_list[i]\n counter = 0\n for x, _ in tqdm(data_generator):\n outputs = model.predict(x[:2])\n for out in outputs:\n logit_pos = out[0].T\n patterns_logits[i].append(logit_pos)\n samples_logits[counter].append(logit_pos)\n counter += 1\n preds = []\n for i in range(len(patterns_logits[0])):\n pred = numpy.argmax([logits[i] for logits in patterns_logits])\n preds.append(int(pred))\n return preds, samples_logits\n\n\n<code token>\n",
"<import token>\n\n\nclass data_generator(DataGenerator):\n \"\"\"Data Generator\"\"\"\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\n<function token>\n<code token>\n",
"<import token>\n\n\nclass data_generator(DataGenerator):\n <docstring token>\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n\n def __iter__(self, random=False):\n batch_token_ids, batch_segment_ids, batch_output_ids = [], [], []\n for is_end, text in self.sample(random):\n if self.is_pre:\n token_ids, segment_ids = tokenizer.encode(first_text=self.\n pattern, second_text=text, maxlen=maxlen)\n else:\n token_ids, segment_ids = tokenizer.encode(first_text=text,\n second_text=self.pattern, maxlen=maxlen)\n source_ids, target_ids = token_ids[:], token_ids[:]\n batch_token_ids.append(source_ids)\n batch_segment_ids.append(segment_ids)\n if len(batch_token_ids) == self.batch_size or is_end:\n batch_token_ids = sequence_padding(batch_token_ids)\n batch_segment_ids = sequence_padding(batch_segment_ids)\n yield [batch_token_ids, batch_segment_ids], None\n batch_token_ids, batch_segment_ids = [], []\n\n\n<function token>\n<code token>\n",
"<import token>\n\n\nclass data_generator(DataGenerator):\n <docstring token>\n\n def __init__(self, pattern='', is_pre=True, *args, **kwargs):\n super(data_generator, self).__init__(*args, **kwargs)\n self.pattern = pattern\n self.is_pre = is_pre\n <function token>\n\n\n<function token>\n<code token>\n",
"<import token>\n\n\nclass data_generator(DataGenerator):\n <docstring token>\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n",
"<import token>\n<class token>\n<function token>\n<code token>\n"
] | false |
9,728 |
c9bc331f4805a956146619c59d183fc3bcbe47cb
|
from conans import ConanFile, CMake, tools
import os
class Demo(ConanFile):
    """Conan package recipe for Demo, a small consumer of TestLib.

    Builds the CMake project under ``src/``, runs the produced ``example``
    binary as a smoke test, and packages headers plus built libraries.
    """

    name = "Demo"
    version = "0.1"
    license = "<Put the package license here>"
    url = "<Package recipe repository url here, for issues about the package>"
    description = "<Description of Testlib here>"
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    generators = "cmake"
    exports_sources = "src/*"
    requires = "TestLib/0.1@gbmhunter/testing"

    def build(self):
        """Configure and build the CMake project rooted at src/."""
        cmake = CMake(self)
        cmake.configure(source_folder="src/")
        # Removed leftover debug print of deps_cpp_info["TestLib"].
        cmake.build()

    def imports(self):
        """Copy dependencies' shared libraries next to the built binaries."""
        self.copy("*.dll", dst="bin", src="bin")
        self.copy("*.dylib*", dst="bin", src="lib")
        self.copy("*.so*", dst="bin", src="lib")

    def test(self):
        """Run the built example binary (skipped when cross-building)."""
        if not tools.cross_building(self.settings):
            os.chdir("bin")
            self.run(".%sexample" % os.sep)

    def package(self):
        """Collect headers and built artifacts into the package folder."""
        self.copy("*.h", dst="include", src="src")
        self.copy("*.lib", dst="lib", keep_path=False)
        self.copy("*.dll", dst="bin", keep_path=False)
        self.copy("*.dylib*", dst="lib", keep_path=False)
        self.copy("*.so", dst="lib", keep_path=False)
        self.copy("*.a", dst="lib", keep_path=False)
|
[
"from conans import ConanFile, CMake, tools\nimport os\n\nclass Demo(ConanFile):\n name = \"Demo\"\n version = \"0.1\"\n license = \"<Put the package license here>\"\n url = \"<Package recipe repository url here, for issues about the package>\"\n description = \"<Description of Testlib here>\"\n settings = \"os\", \"compiler\", \"build_type\", \"arch\"\n options = {\"shared\": [True, False]}\n default_options = \"shared=False\"\n generators = \"cmake\"\n exports_sources = \"src/*\"\n requires = \"TestLib/0.1@gbmhunter/testing\"\n\n def build(self):\n cmake = CMake(self) \n cmake.configure(source_folder=\"src/\")\n\n print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.deps_cpp_info[\"TestLib\"]))\n cmake.build()\n\n def imports(self):\n self.copy(\"*.dll\", dst=\"bin\", src=\"bin\")\n self.copy(\"*.dylib*\", dst=\"bin\", src=\"lib\")\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir(\"bin\")\n self.run(\".%sexample\" % os.sep)\n\n def package(self):\n self.copy(\"*.h\", dst=\"include\", src=\"src\")\n self.copy(\"*.lib\", dst=\"lib\", keep_path=False)\n self.copy(\"*.dll\", dst=\"bin\", keep_path=False)\n self.copy(\"*.dylib*\", dst=\"lib\", keep_path=False)\n self.copy(\"*.so\", dst=\"lib\", keep_path=False)\n self.copy(\"*.a\", dst=\"lib\", keep_path=False)",
"from conans import ConanFile, CMake, tools\nimport os\n\n\nclass Demo(ConanFile):\n name = 'Demo'\n version = '0.1'\n license = '<Put the package license here>'\n url = '<Package recipe repository url here, for issues about the package>'\n description = '<Description of Testlib here>'\n settings = 'os', 'compiler', 'build_type', 'arch'\n options = {'shared': [True, False]}\n default_options = 'shared=False'\n generators = 'cmake'\n exports_sources = 'src/*'\n requires = 'TestLib/0.1@gbmhunter/testing'\n\n def build(self):\n cmake = CMake(self)\n cmake.configure(source_folder='src/')\n print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.\n deps_cpp_info['TestLib']))\n cmake.build()\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir('bin')\n self.run('.%sexample' % os.sep)\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n",
"<import token>\n\n\nclass Demo(ConanFile):\n name = 'Demo'\n version = '0.1'\n license = '<Put the package license here>'\n url = '<Package recipe repository url here, for issues about the package>'\n description = '<Description of Testlib here>'\n settings = 'os', 'compiler', 'build_type', 'arch'\n options = {'shared': [True, False]}\n default_options = 'shared=False'\n generators = 'cmake'\n exports_sources = 'src/*'\n requires = 'TestLib/0.1@gbmhunter/testing'\n\n def build(self):\n cmake = CMake(self)\n cmake.configure(source_folder='src/')\n print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.\n deps_cpp_info['TestLib']))\n cmake.build()\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir('bin')\n self.run('.%sexample' % os.sep)\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n",
"<import token>\n\n\nclass Demo(ConanFile):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def build(self):\n cmake = CMake(self)\n cmake.configure(source_folder='src/')\n print('BLAHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH = ' + str(self.\n deps_cpp_info['TestLib']))\n cmake.build()\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir('bin')\n self.run('.%sexample' % os.sep)\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n",
"<import token>\n\n\nclass Demo(ConanFile):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n\n def test(self):\n if not tools.cross_building(self.settings):\n os.chdir('bin')\n self.run('.%sexample' % os.sep)\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n",
"<import token>\n\n\nclass Demo(ConanFile):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def imports(self):\n self.copy('*.dll', dst='bin', src='bin')\n self.copy('*.dylib*', dst='bin', src='lib')\n self.copy('*.so*', dst='bin', src='lib')\n <function token>\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n",
"<import token>\n\n\nclass Demo(ConanFile):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n\n def package(self):\n self.copy('*.h', dst='include', src='src')\n self.copy('*.lib', dst='lib', keep_path=False)\n self.copy('*.dll', dst='bin', keep_path=False)\n self.copy('*.dylib*', dst='lib', keep_path=False)\n self.copy('*.so', dst='lib', keep_path=False)\n self.copy('*.a', dst='lib', keep_path=False)\n",
"<import token>\n\n\nclass Demo(ConanFile):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
9,729 |
b4d48427dddc7c0240cf05c003cbf7b0163279ee
|
from django.contrib import admin
from .models import (AddressLink, Address, Child, Citation,
Configuration, Event, Exclusion, FactType,
Family, Group, Label, LinkAncestry,
Link, MediaLink, Multimedia, Name,
Person, Place, ResearchItem, Research,
Role, Source, SourceTemplate, Url,
Witness)
from . import EXODUS_DB_NAME
from .utils.admin import MultiDBModelAdmin
from .utils.rootsmagic import read_and_pprint_date
class RootsMagicModelAdmin(MultiDBModelAdmin):
    """Shared base for all RootsMagic admins.

    # NOTE(review): `using` is presumably the DB alias MultiDBModelAdmin
    # queries against — confirm in .utils.admin.
    """
    using = EXODUS_DB_NAME
class AddressLinkAdmin(RootsMagicModelAdmin):
    """Change-list columns for AddressLink rows."""

    list_display = [
        "id", "owner_type", "address",
        "owner_id", "address_number", "details",
    ]
class AddressAdmin(RootsMagicModelAdmin):
    """Default admin for Address records (no customization)."""
    pass
class ChildAdmin(RootsMagicModelAdmin):
    """Admin for Child rows.

    `child` and `family` use raw-id widgets so the form does not load
    every related row into a dropdown.
    """

    list_display = [
        "record_id", "child", "family",
        "father_relationship", "mother_relationship", "child_order",
        "is_private", "father_proof", "mother_proof", "note",
    ]
    raw_id_fields = ["child", "family"]
class CitationAdmin(RootsMagicModelAdmin):
    """Change-list columns for Citation rows ("fields" deliberately omitted)."""

    list_display = [
        "id", "owner_type", "source_id", "owner_id", "quality",
        "is_private", "comments", "actual_text", "reference_number", "flags",
    ]
class ConfigurationAdmin(RootsMagicModelAdmin):
    """Default admin for Configuration records (no customization)."""
    pass
class EventAdmin(RootsMagicModelAdmin):
    """Admin for Event rows; renders the raw date via `pretty_date`."""

    list_display = [
        "id", "event_type", "owner", "owner_type", "owner_id",
        "family", "place", "site",
        "pretty_date", "sort_date",
        "is_primary", "is_private", "proof", "status",
        "edit_date", "sentence",
    ]

    def pretty_date(self, instance):
        """Return the event's date formatted for humans."""
        return read_and_pprint_date(instance.date)
    pretty_date.short_description = "Date"
class ExclusionAdmin(RootsMagicModelAdmin):
    """Default admin for Exclusion records (no customization)."""
    pass
class FactTypeAdmin(RootsMagicModelAdmin):
    """Change-list columns for FactType rows."""

    list_display = [
        "id", "owner_type", "name", "abbreviation", "gedcom_tag",
        "use_value", "use_date", "use_place", "sentence", "flags",
    ]
class FamilyAdmin(RootsMagicModelAdmin):
    """Change-list columns for Family rows ("note" deliberately omitted)."""

    list_display = [
        "id", "father", "mother", "child",
        "husband_order", "wife_order",
        "is_private", "proof",
        "spouse_label", "father_label", "mother_label",
    ]
class GroupAdmin(RootsMagicModelAdmin):
    """Default admin for Group records (no customization)."""
    pass
class LabelAdmin(RootsMagicModelAdmin):
    """Default admin for Label records (no customization)."""
    pass
class LinkAncestryAdmin(RootsMagicModelAdmin):
    """Default admin for LinkAncestry records (no customization)."""
    pass
class LinkAdmin(RootsMagicModelAdmin):
    """Change-list columns for Link rows (external-system links)."""

    list_display = [
        "id", "ext_system", "link_type", "rootsmagic", "ext_id",
        "modified", "ext_version", "ext_date", "status", "note",
    ]
class MediaLinkAdmin(RootsMagicModelAdmin):
    """Change-list columns for MediaLink rows ("description" omitted)."""

    list_display = [
        "link_id", "media", "owner", "owner_type", "owner_id",
        "is_primary",
        "include_1", "include_2", "include_3", "include_4",
        "sort_order",
        "rectangle_left", "rectangle_top",
        "rectangle_right", "rectangle_bottom",
        "note", "caption", "reference_number",
        "date", "sort_date",
    ]
class MultimediaAdmin(RootsMagicModelAdmin):
    """Admin for Multimedia rows; renders the raw date via `pretty_date`."""

    list_display = [
        "id", "media_type", "media_path", "media_file",
        "url", "thumbnail", "caption", "reference_number",
        "pretty_date", "sort_date",
    ]

    def pretty_date(self, instance):
        """Return the media item's date formatted for humans."""
        return read_and_pprint_date(instance.date)
    pretty_date.short_description = "Date"
class NameAdmin(RootsMagicModelAdmin):
    """Change-list columns for Name rows ("note" deliberately omitted)."""

    list_display = [
        "id", "owner",
        "surname", "given", "prefix", "suffix", "nickname",
        "name_type", "date", "sort_date",
        "is_primary", "is_private", "proof",
        "edit_date", "sentence",
        "birth_year", "death_year",
    ]
class PersonAdmin(RootsMagicModelAdmin):
    """Change-list columns for Person rows ("note" deliberately omitted)."""

    list_display = [
        "id", "primary_name", "sex_short", "edit_date",
        "parent", "spouse", "color",
        "relate_1", "relate_2", "flags",
        "is_living", "is_private", "proof",
        "unique_id", "bookmark",
    ]
class PlaceAdmin(RootsMagicModelAdmin):
    """Admin for Place rows.

    NOTE(review): "exact_latituate_longitude" looks like a typo mirrored
    from the model field name — fix at the model level, not here.
    """

    list_display = [
        "id", "place_type", "name", "abbreviation",
        "normalized", "master_place",
        "pretty_latlong", "exact_latituate_longitude",
        "note",
    ]
    # Raw-id widget avoids loading every Place into the master_place dropdown.
    raw_id_fields = ["master_place"]
    readonly_fields = ["pretty_latlong"]
class ResearchItemAdmin(RootsMagicModelAdmin):
    """Default admin for ResearchItem records (no customization)."""
    pass
class ResearchAdmin(RootsMagicModelAdmin):
    """Default admin for Research records (no customization)."""
    pass
class RoleAdmin(RootsMagicModelAdmin):
    """Change-list columns for Role rows."""

    list_display = ["id", "role_name", "event_type", "role_type", "sentence"]
class SourceAdmin(RootsMagicModelAdmin):
    """Admin for Source records."""
    # Raw-id widget avoids loading every SourceTemplate into a dropdown.
    raw_id_fields = ['template']
class SourceTemplateAdmin(RootsMagicModelAdmin):
    """Default admin for SourceTemplate records (no customization)."""
    pass
class UrlAdmin(RootsMagicModelAdmin):
    """Change-list columns for Url rows."""

    list_display = [
        "id", "owner_type", "owner_id",
        "link_type", "name", "url", "note",
    ]
class WitnessAdmin(RootsMagicModelAdmin):
    """Change-list columns for Witness rows."""

    list_display = [
        "id", "event", "person", "witness_order", "role",
        "sentence", "note",
        "given", "surname", "prefix", "suffix",
    ]
# Register each RootsMagic model with its dedicated admin class,
# in the same order as the original one-call-per-model listing.
for model_class, admin_class in [
    (AddressLink, AddressLinkAdmin),
    (Address, AddressAdmin),
    (Child, ChildAdmin),
    (Citation, CitationAdmin),
    (Configuration, ConfigurationAdmin),
    (Event, EventAdmin),
    (Exclusion, ExclusionAdmin),
    (FactType, FactTypeAdmin),
    (Family, FamilyAdmin),
    (Group, GroupAdmin),
    (Label, LabelAdmin),
    (LinkAncestry, LinkAncestryAdmin),
    (Link, LinkAdmin),
    (MediaLink, MediaLinkAdmin),
    (Multimedia, MultimediaAdmin),
    (Name, NameAdmin),
    (Person, PersonAdmin),
    (Place, PlaceAdmin),
    (ResearchItem, ResearchItemAdmin),
    (Research, ResearchAdmin),
    (Role, RoleAdmin),
    (Source, SourceAdmin),
    (SourceTemplate, SourceTemplateAdmin),
    (Url, UrlAdmin),
    (Witness, WitnessAdmin),
]:
    admin.site.register(model_class, admin_class)
|
[
"from django.contrib import admin\n\nfrom .models import (AddressLink, Address, Child, Citation,\n Configuration, Event, Exclusion, FactType,\n Family, Group, Label, LinkAncestry,\n Link, MediaLink, Multimedia, Name,\n Person, Place, ResearchItem, Research,\n Role, Source, SourceTemplate, Url,\n Witness)\n\nfrom . import EXODUS_DB_NAME\nfrom .utils.admin import MultiDBModelAdmin\nfrom .utils.rootsmagic import read_and_pprint_date\n\n\nclass RootsMagicModelAdmin(MultiDBModelAdmin):\n using = EXODUS_DB_NAME\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"address\",\n \"owner_id\",\n \"address_number\",\n \"details\",\n ]\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = [\n \"record_id\",\n \"child\",\n \"family\",\n \"father_relationship\",\n \"mother_relationship\",\n \"child_order\",\n \"is_private\",\n \"father_proof\",\n \"mother_proof\",\n \"note\",\n ]\n raw_id_fields = [\n 'child',\n 'family',\n ]\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"source_id\",\n \"owner_id\",\n \"quality\",\n \"is_private\",\n \"comments\",\n \"actual_text\",\n \"reference_number\",\n \"flags\",\n # \"fields\",\n ]\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"event_type\",\n \"owner\",\n \"owner_type\",\n \"owner_id\",\n \"family\",\n \"place\",\n \"site\",\n # \"date\",\n \"pretty_date\",\n \"sort_date\",\n \"is_primary\",\n \"is_private\",\n \"proof\",\n \"status\",\n \"edit_date\",\n \"sentence\",\n # \"details\",\n # \"note\",\n ]\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = \"Date\"\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"name\",\n \"abbreviation\",\n 
\"gedcom_tag\",\n \"use_value\",\n \"use_date\",\n \"use_place\",\n \"sentence\",\n \"flags\",\n ]\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"father\",\n \"mother\",\n \"child\",\n \"husband_order\",\n \"wife_order\",\n \"is_private\",\n \"proof\",\n \"spouse_label\",\n \"father_label\",\n \"mother_label\",\n # \"note\",\n ]\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"ext_system\",\n \"link_type\",\n \"rootsmagic\",\n \"ext_id\",\n \"modified\",\n \"ext_version\",\n \"ext_date\",\n \"status\",\n \"note\",\n ]\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = [\n \"link_id\",\n \"media\",\n \"owner\",\n \"owner_type\",\n \"owner_id\",\n \"is_primary\",\n \"include_1\",\n \"include_2\",\n \"include_3\",\n \"include_4\",\n \"sort_order\",\n \"rectangle_left\",\n \"rectangle_top\",\n \"rectangle_right\",\n \"rectangle_bottom\",\n \"note\",\n \"caption\",\n \"reference_number\",\n \"date\",\n \"sort_date\",\n # \"description\",\n ]\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"media_type\",\n \"media_path\",\n \"media_file\",\n \"url\",\n \"thumbnail\",\n \"caption\",\n \"reference_number\",\n # \"date\",\n \"pretty_date\",\n \"sort_date\",\n # \"description\",\n ]\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = \"Date\"\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner\",\n \"surname\",\n \"given\",\n \"prefix\",\n \"suffix\",\n \"nickname\",\n \"name_type\",\n \"date\",\n \"sort_date\",\n \"is_primary\",\n \"is_private\",\n \"proof\",\n \"edit_date\",\n \"sentence\",\n # \"note\",\n \"birth_year\",\n \"death_year\",\n ]\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n 'primary_name',\n 
\"sex_short\",\n \"edit_date\",\n \"parent\",\n \"spouse\",\n \"color\",\n \"relate_1\",\n \"relate_2\",\n \"flags\",\n \"is_living\",\n \"is_private\",\n \"proof\",\n \"unique_id\",\n \"bookmark\",\n # \"note\",\n ]\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"place_type\",\n \"name\",\n \"abbreviation\",\n \"normalized\",\n \"master_place\",\n # \"latitude\",\n # \"longitude\",\n \"pretty_latlong\",\n \"exact_latituate_longitude\",\n \"note\",\n ]\n raw_id_fields = [\n \"master_place\"\n ]\n readonly_fields = [\n \"pretty_latlong\"\n ]\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"role_name\",\n \"event_type\",\n \"role_type\",\n \"sentence\",\n ]\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"owner_id\",\n \"link_type\",\n \"name\",\n \"url\",\n \"note\",\n ]\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"event\",\n \"person\",\n \"witness_order\",\n \"role\",\n \"sentence\",\n \"note\",\n \"given\",\n \"surname\",\n \"prefix\",\n \"suffix\",\n ]\n\n\n\nadmin.site.register(AddressLink, AddressLinkAdmin)\nadmin.site.register(Address, AddressAdmin)\nadmin.site.register(Child, ChildAdmin)\nadmin.site.register(Citation, CitationAdmin)\nadmin.site.register(Configuration, ConfigurationAdmin)\nadmin.site.register(Event, EventAdmin)\nadmin.site.register(Exclusion, ExclusionAdmin)\nadmin.site.register(FactType, FactTypeAdmin)\nadmin.site.register(Family, FamilyAdmin)\nadmin.site.register(Group, GroupAdmin)\nadmin.site.register(Label, LabelAdmin)\nadmin.site.register(LinkAncestry, LinkAncestryAdmin)\nadmin.site.register(Link, LinkAdmin)\nadmin.site.register(MediaLink, 
MediaLinkAdmin)\nadmin.site.register(Multimedia, MultimediaAdmin)\nadmin.site.register(Name, NameAdmin)\nadmin.site.register(Person, PersonAdmin)\nadmin.site.register(Place, PlaceAdmin)\nadmin.site.register(ResearchItem, ResearchItemAdmin)\nadmin.site.register(Research, ResearchAdmin)\nadmin.site.register(Role, RoleAdmin)\nadmin.site.register(Source, SourceAdmin)\nadmin.site.register(SourceTemplate, SourceTemplateAdmin)\nadmin.site.register(Url, UrlAdmin)\nadmin.site.register(Witness, WitnessAdmin)\n",
"from django.contrib import admin\nfrom .models import AddressLink, Address, Child, Citation, Configuration, Event, Exclusion, FactType, Family, Group, Label, LinkAncestry, Link, MediaLink, Multimedia, Name, Person, Place, ResearchItem, Research, Role, Source, SourceTemplate, Url, Witness\nfrom . import EXODUS_DB_NAME\nfrom .utils.admin import MultiDBModelAdmin\nfrom .utils.rootsmagic import read_and_pprint_date\n\n\nclass RootsMagicModelAdmin(MultiDBModelAdmin):\n using = EXODUS_DB_NAME\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'address', 'owner_id',\n 'address_number', 'details']\n\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 
'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 
'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\nadmin.site.register(AddressLink, AddressLinkAdmin)\nadmin.site.register(Address, AddressAdmin)\nadmin.site.register(Child, ChildAdmin)\nadmin.site.register(Citation, CitationAdmin)\nadmin.site.register(Configuration, ConfigurationAdmin)\nadmin.site.register(Event, EventAdmin)\nadmin.site.register(Exclusion, ExclusionAdmin)\nadmin.site.register(FactType, FactTypeAdmin)\nadmin.site.register(Family, FamilyAdmin)\nadmin.site.register(Group, GroupAdmin)\nadmin.site.register(Label, LabelAdmin)\nadmin.site.register(LinkAncestry, LinkAncestryAdmin)\nadmin.site.register(Link, LinkAdmin)\nadmin.site.register(MediaLink, MediaLinkAdmin)\nadmin.site.register(Multimedia, MultimediaAdmin)\nadmin.site.register(Name, NameAdmin)\nadmin.site.register(Person, PersonAdmin)\nadmin.site.register(Place, PlaceAdmin)\nadmin.site.register(ResearchItem, ResearchItemAdmin)\nadmin.site.register(Research, ResearchAdmin)\nadmin.site.register(Role, RoleAdmin)\nadmin.site.register(Source, SourceAdmin)\nadmin.site.register(SourceTemplate, SourceTemplateAdmin)\nadmin.site.register(Url, UrlAdmin)\nadmin.site.register(Witness, WitnessAdmin)\n",
"<import token>\n\n\nclass RootsMagicModelAdmin(MultiDBModelAdmin):\n using = EXODUS_DB_NAME\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'address', 'owner_id',\n 'address_number', 'details']\n\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n 
list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\nadmin.site.register(AddressLink, AddressLinkAdmin)\nadmin.site.register(Address, AddressAdmin)\nadmin.site.register(Child, ChildAdmin)\nadmin.site.register(Citation, CitationAdmin)\nadmin.site.register(Configuration, ConfigurationAdmin)\nadmin.site.register(Event, EventAdmin)\nadmin.site.register(Exclusion, ExclusionAdmin)\nadmin.site.register(FactType, FactTypeAdmin)\nadmin.site.register(Family, FamilyAdmin)\nadmin.site.register(Group, GroupAdmin)\nadmin.site.register(Label, LabelAdmin)\nadmin.site.register(LinkAncestry, LinkAncestryAdmin)\nadmin.site.register(Link, LinkAdmin)\nadmin.site.register(MediaLink, MediaLinkAdmin)\nadmin.site.register(Multimedia, MultimediaAdmin)\nadmin.site.register(Name, NameAdmin)\nadmin.site.register(Person, PersonAdmin)\nadmin.site.register(Place, PlaceAdmin)\nadmin.site.register(ResearchItem, ResearchItemAdmin)\nadmin.site.register(Research, ResearchAdmin)\nadmin.site.register(Role, RoleAdmin)\nadmin.site.register(Source, SourceAdmin)\nadmin.site.register(SourceTemplate, SourceTemplateAdmin)\nadmin.site.register(Url, UrlAdmin)\nadmin.site.register(Witness, WitnessAdmin)\n",
"<import token>\n\n\nclass RootsMagicModelAdmin(MultiDBModelAdmin):\n using = EXODUS_DB_NAME\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'address', 'owner_id',\n 'address_number', 'details']\n\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n 
list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n\n\nclass RootsMagicModelAdmin(MultiDBModelAdmin):\n <assignment token>\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'address', 'owner_id',\n 'address_number', 'details']\n\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n 
list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'address', 'owner_id',\n 'address_number', 'details']\n\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 
'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 
'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 
'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 
'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = ['record_id', 'child', 'family', 'father_relationship',\n 'mother_relationship', 'child_order', 'is_private', 'father_proof',\n 'mother_proof', 'note']\n raw_id_fields = ['child', 'family']\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 
'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n <assignment token>\n <assignment token>\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 
'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 
'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n 
pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass 
NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 
'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n <assignment token>\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass 
PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n <assignment token>\n <function token>\n <assignment token>\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 
'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 
'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 
'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 
'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 
'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass 
ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass 
ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass 
ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass 
RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 
'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = 
['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass 
SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 
'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n <assignment token>\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n <assignment token>\n <function token>\n <assignment token>\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n <assignment token>\n\n\n<code token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<code token>\n"
] | false |
9,730 |
3a96ede91069df0c71905415e598dbbd9d3056fd
|
import functools
import logging
import os
import queue
import re
import sys
import threading
import traceback
from logging.handlers import TimedRotatingFileHandler
from pathlib import Path

import click
import inotify.adapters
from inotify.constants import (IN_ATTRIB, IN_DELETE, IN_MOVED_FROM,
                               IN_MOVED_TO, IN_CLOSE_WRITE)

from lxd_image_server.simplestreams.images import Images
from lxd_image_server.tools.cert import generate_cert
from lxd_image_server.tools.operation import Operations
from lxd_image_server.tools.mirror import MirrorManager
from lxd_image_server.tools.config import Config
logger = logging.getLogger('lxd-image-server')
event_queue = queue.Queue()
def threaded(fn):
    """Decorator: run each call of *fn* in a fresh background thread.

    The call returns immediately (fire-and-forget); the wrapped function's
    return value is discarded, matching the original contract.
    """
    @functools.wraps(fn)  # preserve __name__/__doc__ of the decorated function
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs).start()
    return wrapper
def configure_log(log_file, verbose=False):
    """Attach a handler for *log_file* to the module-level logger.

    The special values ``'STDOUT'`` and ``'STDERR'`` select stream handlers;
    anything else is treated as a file path and receives a weekly rotating
    file handler (4 backups kept).
    """
    if log_file == 'STDOUT':
        handler = logging.StreamHandler(sys.stdout)
    elif log_file == 'STDERR':
        handler = logging.StreamHandler(sys.stderr)
    else:
        handler = TimedRotatingFileHandler(
            log_file, when="d", interval=7, backupCount=4)

    handler.setFormatter(logging.Formatter(
        '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s'))

    logger.setLevel('DEBUG' if verbose else 'INFO')
    logger.addHandler(handler)
def needs_update(events):
    """Filter inotify *events* down to those that require a server rebuild.

    An event tuple ``(header, type_names, path, filename)`` is kept when the
    filename looks like a timestamped image directory (``YYYYMMDD_HH:MM``) or
    the event types include a move/delete/close-write.

    :param events: iterable of inotify 4-tuples
    :return: list of the relevant event tuples, in input order
    """
    modified_files = []
    # Iterate directly; the generator is consumed exactly once, so the
    # original list(...) materialization was unnecessary.
    for event in events:
        # Raw string fixes the invalid "\d" escape in the original literal.
        if re.match(r'\d{8}_\d{2}:\d{2}', event[3]) or \
                any(k in event[1]
                    for k in ('IN_MOVED_FROM', 'IN_MOVED_TO',
                              'IN_DELETE', 'IN_CLOSE_WRITE')):
            logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'
                         .format(event[2], event[3], event[1]))
            modified_files.append(event)

    return modified_files
def config_inotify_setup(skipWatchingNonExistent: bool = True) -> inotify.adapters.Inotify:
    """Build an Inotify watcher that covers every path in ``Config.paths``.

    Existing files are watched for close-write/delete; existing directories
    are watched wholesale.  For a path that does not yet exist, the closest
    existing ancestor directory is watched instead so its later creation is
    noticed — unless *skipWatchingNonExistent* is true.

    The parameter now defaults to True so callers may omit it (the
    no-argument re-creation call in update_config previously raised
    TypeError).

    :return: a configured inotify.adapters.Inotify instance
    """
    i = inotify.adapters.Inotify()
    watchedDirs = {}

    for p in Config.paths:
        if os.path.exists(p):
            if os.path.isfile(p):
                logger.debug("Watching existing config file {}".format(p))
                i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_DELETE)
            else:
                logger.debug("Watching existing config directory {}".format(p))
                i.add_watch(p)  # SEEME: all events?
        elif not skipWatchingNonExistent:
            # Walk upward until an existing ancestor directory is found.
            (d, n) = os.path.split(p)
            while not os.path.exists(d):
                (d, n) = os.path.split(d)
            if d not in watchedDirs:
                i.add_watch(d, inotify.constants.IN_DELETE | inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)
                logger.debug("Watching directory {} as base for {}".format(d, p))
                watchedDirs[d] = True

    return i
@threaded
def update_config(skipWatchingNonExistent=True):
    """Background thread: reload Config whenever a watched config path changes.

    Blocks forever on inotify events.  When an event touches one of
    ``Config.paths`` (or its parent directory), the configuration is
    reloaded, the watcher is re-created and the mirror list refreshed.
    """
    i = config_inotify_setup(skipWatchingNonExistent)
    while True:
        reload = False
        for event in i.event_gen(yield_nones=False):
            # Avoid shadowing the dir()/file builtins of the original code.
            (_, mask, dirname, filename) = event
            fp = os.path.join(dirname, filename).rstrip(os.path.sep)
            for p in Config.paths:
                if p == fp or dirname == p:
                    reload = True
                    break
            if reload:
                break
        if reload:
            logger.debug("Will reload configuration")
            Config.reload_data()
            # Re-create the watcher with the same skip flag; the original
            # argument-less call raised TypeError at runtime.
            i = config_inotify_setup(skipWatchingNonExistent)
            MirrorManager.update_mirror_list()
        else:
            logger.debug("No need to reload configuration")
@threaded
def update_metadata(img_dir, streams_dir):
    """Background thread: drain the event queue and rebuild the streams.

    Registers *img_dir* with the MirrorManager, then loops forever taking
    batches of file events from ``event_queue`` and applying the resulting
    operations to the simplestreams index.
    """
    MirrorManager.img_dir = img_dir
    MirrorManager.update_mirror_list()

    while True:
        batch = event_queue.get()
        ops = Operations(batch, str(Path(img_dir).resolve()))
        if not ops:
            continue
        logger.info('Updating server: %s', ','.join(
            str(x) for x in ops.ops))
        images = Images(str(Path(streams_dir).resolve()), logger=logger)
        images.update(ops.ops)
        images.save()
        MirrorManager.update()
        logger.info('Server updated')
def fix_permissions(path):
    """Recursively force mode 0o775 on *path* and everything beneath it."""
    Path(path).chmod(0o775)
    for parent, subdirs, filenames in os.walk(path):
        for name in filenames + subdirs:
            Path(parent, name).chmod(0o775)
# Root click group: every subcommand runs after logging is configured.
# NOTE: intentionally a comment rather than a docstring — a docstring here
# would become the click --help text.
@click.group()
@click.option('--log-file', default='./lxd-image-server.log',
              show_default=True)
@click.option('--verbose', help='Sets log level to debug',
              is_flag=True, default=False)
def cli(log_file, verbose):
    # Attach the chosen handler (file/STDOUT/STDERR) to the module logger.
    configure_log(log_file, verbose)
@cli.command()
@click.option('--img_dir', default='/var/www/simplestreams/images',
              show_default=True,
              type=click.Path(exists=True, file_okay=False,
                              resolve_path=True),
              callback=lambda ctx, param, val: Path(val))
@click.option('--streams_dir', default='/var/www/simplestreams/streams/v1',
              show_default=True,
              type=click.Path(exists=True, file_okay=False,
                              resolve_path=True))
@click.pass_context
def update(ctx, img_dir, streams_dir):
    # Rebuild the whole simplestreams index from the image tree on disk.
    logger.info('Updating server')

    img_dir = Path(img_dir).expanduser().resolve()
    streams_dir = Path(streams_dir).expanduser().resolve()

    images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=logger)

    # A synthetic "directory created" event makes Operations walk the
    # entire image tree.
    root_event = (None, ['IN_ISDIR', 'IN_CREATE'],
                  str(img_dir.parent), str(img_dir.name))
    ops = Operations([root_event], str(img_dir))

    images.update(ops.ops)
    images.save()
    logger.info('Server updated')
# One-shot setup: create the directory layout, TLS certs and nginx site,
# then build the initial index.  Comments (not a docstring) so click help
# output is unchanged.
@cli.command()
@click.option('--root_dir', default='/var/www/simplestreams',
              show_default=True)
@click.option('--ssl_dir', default='/etc/nginx/ssl', show_default=True,
              callback=lambda ctx, param, val: Path(val))
@click.option('--ssl_skip', default=False, is_flag=True)
@click.option('--nginx_skip', default=False, is_flag=True)
@click.pass_context
def init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):
    root_dir = Path(root_dir).expanduser().resolve()
    if not Path(root_dir).exists():
        logger.error('Root directory does not exists')
    else:
        # Without nginx there is no consumer for the certs either.
        if nginx_skip:
            ssl_skip = True

        if not ssl_skip:
            # Generate a self-signed cert only if none exists yet.
            if not ssl_dir.exists():
                os.makedirs(str(ssl_dir))
            if not (ssl_dir / 'nginx.key').exists():
                generate_cert(str(ssl_dir))

        img_dir = str(Path(root_dir, 'images'))
        streams_dir = str(Path(root_dir, 'streams/v1'))
        if not Path(img_dir).exists():
            os.makedirs(img_dir)
        if not Path(streams_dir).exists():
            os.makedirs(streams_dir)

        if not nginx_skip:
            # Enable the pre-installed site config and reload nginx.
            conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')
            if not conf_path.exists():
                conf_path.symlink_to(
                    '/etc/nginx/sites-available/simplestreams.conf')
            os.system('nginx -s reload')

        # First run: build the index by invoking the `update` command.
        if not Path(root_dir, 'streams', 'v1', 'images.json').exists():
            ctx.invoke(update, img_dir=Path(root_dir, 'images'),
                       streams_dir=Path(root_dir, 'streams', 'v1'))

        fix_permissions(img_dir)
        fix_permissions(streams_dir)
# Long-running mode: watch the image tree with inotify and feed batches of
# relevant events to the updater thread.  Comments (not a docstring) so
# click help output is unchanged.
@cli.command()
@click.option('--img_dir', default='/var/www/simplestreams/images',
              show_default=True,
              type=click.Path(exists=True, file_okay=False,
                              resolve_path=True))
@click.option('--streams_dir', default='/var/www/simplestreams/streams/v1',
              type=click.Path(exists=True, file_okay=False,
                              resolve_path=True), show_default=True)
@click.option('--skip-watch-config-non-existent', default=False, type=bool, is_flag=True)
@click.pass_context
def watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):
    path_img_dir = str(Path(img_dir).expanduser().resolve())
    path_streams_dir = str(Path(streams_dir).expanduser().resolve())
    logger.info("Starting watch process")

    Config.load_data()

    # Lauch threads
    # SEEME: in case an event will come from watching config files, there is a race condition between update_config
    # thread using indirectly MirrorManager.img_dir and thread update_metadata setting MirrorManager.img_dir
    # Also, race condition on calling MirrorManager.update_mirror_list() in both threads.
    update_config(skip_watch_config_non_existent)
    update_metadata(path_img_dir, path_streams_dir)

    logger.debug("Watching image directory {}".format(path_img_dir))
    i = inotify.adapters.InotifyTree(path_img_dir,
                                     mask=(IN_ATTRIB | IN_DELETE |
                                           IN_MOVED_FROM | IN_MOVED_TO |
                                           IN_CLOSE_WRITE))

    # Main loop: batch events in 15 s windows and queue anything relevant
    # for the update_metadata thread.
    while True:
        events = i.event_gen(yield_nones=False, timeout_s=15)
        files_changed = needs_update(events)
        if files_changed:
            event_queue.put(files_changed)
def main():
    """Entry point: run the CLI and exit with its status code.

    Any unexpected exception is logged with its traceback and turned into
    exit status 1. SystemExit (raised by sys.exit / click itself) is not
    an Exception subclass, so it propagates untouched.
    """
    try:
        exit_code = cli()
    except Exception:
        logger.error(traceback.format_exc())
        sys.exit(1)
    else:
        sys.exit(exit_code)
# Allow the module to be executed directly as a script.
if __name__ == '__main__':
    main()
|
[
"import os\nimport sys\nimport re\nimport traceback\nimport logging\nimport queue\nimport threading\nfrom logging.handlers import TimedRotatingFileHandler\nfrom pathlib import Path\nimport click\nimport inotify.adapters\nfrom inotify.constants import (IN_ATTRIB, IN_DELETE, IN_MOVED_FROM,\n IN_MOVED_TO, IN_CLOSE_WRITE)\nfrom lxd_image_server.simplestreams.images import Images\nfrom lxd_image_server.tools.cert import generate_cert\nfrom lxd_image_server.tools.operation import Operations\nfrom lxd_image_server.tools.mirror import MirrorManager\nfrom lxd_image_server.tools.config import Config\n\n\nlogger = logging.getLogger('lxd-image-server')\nevent_queue = queue.Queue()\n\ndef threaded(fn):\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(\n filename,\n when=\"d\", interval=7, backupCount=4)\n formatter = logging.Formatter('[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\d{8}_\\d{2}:\\d{2}', event[3]) or \\\n any(k in event[1]\n for k in ('IN_MOVED_FROM', 'IN_MOVED_TO',\n 'IN_DELETE', 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'\n .format(event[2], event[3], event[1]))\n modified_files.append(event)\n\n return modified_files\n\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool) -> inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug(\"Watching existing config 
file {}\".format(p))\n i.add_watch(p, mask= inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_DELETE)\n else:\n logger.debug(\"Watching existing config directory {}\".format(p))\n i.add_watch(p) # SEEME: all events?\n elif not skipWatchingNonExistent:\n (d, n) = os.path.split(p)\n while not os.path.exists(d):\n (d, n) = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug(\"Watching directory {} as base for {}\".format(d, p))\n watchedDirs[d] = True\n\n return i\n\n@threaded\ndef update_config(skipWatchingNonExistent = True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n (_, mask, dir, file) = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or (dir == p):\n reload = True\n break\n if reload:\n break\n\n if reload:\n logger.debug(\"Will reload configuration\")\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug(\"No need to reload configuration\")\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(\n str(x) for x in ops.ops))\n images = Images(str(Path(streams_dir).resolve()), logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(0o775)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(0o775)\n for elem in dirs:\n Path(root, elem).chmod(0o775)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log',\n show_default=True)\[email 
protected]('--verbose', help='Sets log level to debug',\n is_flag=True, default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True,\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True),\n callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True,\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=logger)\n\n # Generate a fake event to update all tree\n fake_events = [\n (None, ['IN_ISDIR', 'IN_CREATE'],\n str(img_dir.parent), str(img_dir.name))\n ]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams',\n show_default=True)\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not 
Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True,\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True), show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool, is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info(\"Starting watch process\")\n\n Config.load_data()\n # Lauch threads\n # SEEME: in case an event will come from watching config files, there is a race condition between update_config\n # thread using indirectly MirrorManager.img_dir and thread update_metadata setting MirrorManager.img_dir\n # Also, race condition on calling MirrorManager.update_mirror_list() in both threads.\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug(\"Watching image directory {}\".format(path_img_dir))\n\n i = inotify.adapters.InotifyTree(path_img_dir,\n mask=(IN_ATTRIB | IN_DELETE |\n IN_MOVED_FROM | IN_MOVED_TO |\n IN_CLOSE_WRITE))\n\n while True:\n events = i.event_gen(yield_nones=False, 
timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\nif __name__ == '__main__':\n main()\n",
"import os\nimport sys\nimport re\nimport traceback\nimport logging\nimport queue\nimport threading\nfrom logging.handlers import TimedRotatingFileHandler\nfrom pathlib import Path\nimport click\nimport inotify.adapters\nfrom inotify.constants import IN_ATTRIB, IN_DELETE, IN_MOVED_FROM, IN_MOVED_TO, IN_CLOSE_WRITE\nfrom lxd_image_server.simplestreams.images import Images\nfrom lxd_image_server.tools.cert import generate_cert\nfrom lxd_image_server.tools.operation import Operations\nfrom lxd_image_server.tools.mirror import MirrorManager\nfrom lxd_image_server.tools.config import Config\nlogger = logging.getLogger('lxd-image-server')\nevent_queue = queue.Queue()\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file 
{}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(str(x) for x in ops\n .ops))\n images = Images(str(Path(streams_dir).resolve()), logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', 
is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not 
nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\nlogger = logging.getLogger('lxd-image-server')\nevent_queue = queue.Queue()\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef 
update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(str(x) for x in ops\n .ops))\n images = Images(str(Path(streams_dir).resolve()), logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating 
server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, 
file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\n<assignment token>\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = 
config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(str(x) for x in ops\n .ops))\n images = Images(str(Path(streams_dir).resolve()), logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = 
Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n 
resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\n<assignment token>\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = 
config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(str(x) for x in ops\n .ops))\n images = Images(str(Path(streams_dir).resolve()), logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = 
Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n 
resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = 
config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<function token>\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', 
show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = 
str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = 
config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', 
default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n 
logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, 
dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, 
root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n 
IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, 
dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not 
ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n 
sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\n<function token>\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload 
configuration')\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = 
Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\n<function token>\n<function token>\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], 
str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email 
protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, 
is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = 
inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, 
is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = 
inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n 
os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email 
protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,731 |
0726a4fa3af196e2ba1592019f09afb0e7bb47d7
|
import os
import requests
def download(url: str, dest_folder: str, timeout: float = 60):
    """Stream *url* to a file inside *dest_folder*.

    The file name is the last URL path segment with spaces replaced by
    underscores.  On a non-OK response nothing is written and the status
    code plus response body are printed.

    :param url: HTTP(S) URL to fetch.
    :param dest_folder: directory to save into; created if missing.
    :param timeout: connect/read timeout in seconds for the request.
        The original code passed no timeout, so a stalled server would
        hang the whole batch forever.
    """
    # https://stackoverflow.com/a/56951135/8761164
    if not os.path.exists(dest_folder):
        os.makedirs(dest_folder)  # create folder if it does not exist

    filename = url.split('/')[-1].replace(" ", "_")  # be careful with file names
    file_path = os.path.join(dest_folder, filename)

    # Stream the body; `with` guarantees the connection is released even
    # on error (the original never closed the response).
    with requests.get(url, stream=True, timeout=timeout) as r:
        if r.ok:
            print("saving to", os.path.abspath(file_path))
            with open(file_path, 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024 * 8):
                    if chunk:
                        f.write(chunk)
                # Flush/fsync once after the loop: the original fsync'ed
                # after every 8 KiB chunk, which forces a disk sync per
                # chunk and is pathologically slow with no added safety.
                f.flush()
                os.fsync(f.fileno())
        else:
            print("Download failed: status code {}\n{}".format(r.status_code, r.text))
def parse_lat(lat: int):
    """Format an integer latitude as a hemisphere-tagged, 2-digit string.

    Examples: 47 -> 'N47', 5 -> 'N05', -5 -> 'S05', 0 -> 'N00'.
    """
    hemisphere = 'S' if lat < 0 else 'N'
    return hemisphere + str(abs(lat)).zfill(2)
def parse_long(long: int):
    """Format an integer longitude as a hemisphere-tagged, 3-digit string.

    Examples: 42 -> 'E042', -14 -> 'W014', 100 -> 'E100', 0 -> 'E000'.
    """
    hemisphere = 'W' if long < 0 else 'E'
    return hemisphere + str(abs(long)).zfill(3)
if __name__ == '__main__':
    # Fetch every ASTER GDEM 1x1-degree tile in the lat 22..47 / long -14..42
    # window (roughly Europe and North Africa), newest row first.
    base_url = 'https://gdemdl.aster.jspacesystems.or.jp/download/Download_'
    for lat in range(47, 21, -1):
        for long in range(-14, 43):
            tile = parse_lat(lat) + parse_long(long)
            download(base_url + tile + '.zip',
                     dest_folder='/media/data-ext/aster-gdem')
|
[
"import os\nimport requests\n\ndef download(url: str, dest_folder: str):\n #https://stackoverflow.com/a/56951135/8761164\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder) # create folder if it does not exist\n\n filename = url.split('/')[-1].replace(\" \", \"_\") # be careful with file names\n file_path = os.path.join(dest_folder, filename)\n\n r = requests.get(url, stream=True)\n\n if r.ok:\n print(\"saving to\", os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print(\"Download failed: status code {}\\n{}\".format(r.status_code, r.text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\nif __name__=='__main__':\n\n for lat in range(47, 21, -1):\n for long in range(-14, 43, 1):\n #print(parse_lat(lat), parse_long(long))\n #print(f\"https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip\")\n download(f\"https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip\", dest_folder=\"/media/data-ext/aster-gdem\")",
"import os\nimport requests\n\n\ndef download(url: str, dest_folder: str):\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder)\n filename = url.split('/')[-1].replace(' ', '_')\n file_path = os.path.join(dest_folder, filename)\n r = requests.get(url, stream=True)\n if r.ok:\n print('saving to', os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print('Download failed: status code {}\\n{}'.format(r.status_code, r\n .text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\nif __name__ == '__main__':\n for lat in range(47, 21, -1):\n for long in range(-14, 43, 1):\n download(\n f'https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip'\n , dest_folder='/media/data-ext/aster-gdem')\n",
"<import token>\n\n\ndef download(url: str, dest_folder: str):\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder)\n filename = url.split('/')[-1].replace(' ', '_')\n file_path = os.path.join(dest_folder, filename)\n r = requests.get(url, stream=True)\n if r.ok:\n print('saving to', os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print('Download failed: status code {}\\n{}'.format(r.status_code, r\n .text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\nif __name__ == '__main__':\n for lat in range(47, 21, -1):\n for long in range(-14, 43, 1):\n download(\n f'https://gdemdl.aster.jspacesystems.or.jp/download/Download_{parse_lat(lat)}{parse_long(long)}.zip'\n , dest_folder='/media/data-ext/aster-gdem')\n",
"<import token>\n\n\ndef download(url: str, dest_folder: str):\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder)\n filename = url.split('/')[-1].replace(' ', '_')\n file_path = os.path.join(dest_folder, filename)\n r = requests.get(url, stream=True)\n if r.ok:\n print('saving to', os.path.abspath(file_path))\n with open(file_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024 * 8):\n if chunk:\n f.write(chunk)\n f.flush()\n os.fsync(f.fileno())\n else:\n print('Download failed: status code {}\\n{}'.format(r.status_code, r\n .text))\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\ndef parse_long(long: int):\n long_str = 'E' if long >= 0 else 'W'\n if 100 > long > -100:\n long_str += '0'\n if 10 > long > -10:\n long_str += '0'\n long_str += str(abs(long))\n return long_str\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef parse_lat(lat: int):\n lat_str = 'N' if lat >= 0 else 'S'\n if 10 > lat > -10:\n lat_str += '0'\n lat_str += str(abs(lat))\n return lat_str\n\n\n<function token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,732 |
9ad36f157abae849a1550cb96e650746d57f491d
|
from collections import Counter
from docx import Document
import docx2txt
plain_text = docx2txt.process("kashmiri.docx")
list_of_words = plain_text.split()
#print(Counter(list_of_words))
counter_list_of_words = Counter(list_of_words)
elements = counter_list_of_words.items()
# for a, b in sorted(elements, key=lambda x: x[1], reverse=True):
# print(a)
# print(b)
doc = Document()
# Create and Name Table Heading
table = doc.add_table(rows=1, cols=2)
cell1 = table.cell(0, 0)
cell1.text = 'Word'
cell2 = table.cell(0, 1)
cell2.text = 'Frequency'
#Iterate over collection elements and append to table craeted
for word, frequency in sorted(elements, key=lambda x: x[1], reverse=True):
cell = table.add_row().cells
cell[0].text = str(word)
cell[1].text = str(frequency)
doc.save("results.docx")
|
[
"from collections import Counter\nfrom docx import Document\nimport docx2txt\n\nplain_text = docx2txt.process(\"kashmiri.docx\")\nlist_of_words = plain_text.split()\n#print(Counter(list_of_words))\ncounter_list_of_words = Counter(list_of_words)\nelements = counter_list_of_words.items()\n# for a, b in sorted(elements, key=lambda x: x[1], reverse=True):\n# print(a)\n# print(b)\n\ndoc = Document()\n# Create and Name Table Heading\ntable = doc.add_table(rows=1, cols=2)\ncell1 = table.cell(0, 0)\ncell1.text = 'Word'\ncell2 = table.cell(0, 1)\ncell2.text = 'Frequency'\n\n#Iterate over collection elements and append to table craeted\nfor word, frequency in sorted(elements, key=lambda x: x[1], reverse=True):\n cell = table.add_row().cells\n cell[0].text = str(word)\n cell[1].text = str(frequency)\ndoc.save(\"results.docx\")",
"from collections import Counter\nfrom docx import Document\nimport docx2txt\nplain_text = docx2txt.process('kashmiri.docx')\nlist_of_words = plain_text.split()\ncounter_list_of_words = Counter(list_of_words)\nelements = counter_list_of_words.items()\ndoc = Document()\ntable = doc.add_table(rows=1, cols=2)\ncell1 = table.cell(0, 0)\ncell1.text = 'Word'\ncell2 = table.cell(0, 1)\ncell2.text = 'Frequency'\nfor word, frequency in sorted(elements, key=lambda x: x[1], reverse=True):\n cell = table.add_row().cells\n cell[0].text = str(word)\n cell[1].text = str(frequency)\ndoc.save('results.docx')\n",
"<import token>\nplain_text = docx2txt.process('kashmiri.docx')\nlist_of_words = plain_text.split()\ncounter_list_of_words = Counter(list_of_words)\nelements = counter_list_of_words.items()\ndoc = Document()\ntable = doc.add_table(rows=1, cols=2)\ncell1 = table.cell(0, 0)\ncell1.text = 'Word'\ncell2 = table.cell(0, 1)\ncell2.text = 'Frequency'\nfor word, frequency in sorted(elements, key=lambda x: x[1], reverse=True):\n cell = table.add_row().cells\n cell[0].text = str(word)\n cell[1].text = str(frequency)\ndoc.save('results.docx')\n",
"<import token>\n<assignment token>\nfor word, frequency in sorted(elements, key=lambda x: x[1], reverse=True):\n cell = table.add_row().cells\n cell[0].text = str(word)\n cell[1].text = str(frequency)\ndoc.save('results.docx')\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
9,733 |
7016a7dda80c0cfae0e15cf239f6ae64eb9004b7
|
# Jeremy Jao
# University of Pittsburgh: DBMI
# 6/18/2013
#
# This is the thing that returns the dictionary of the key. we can edit more code to return different values in the keys (gene) in each dictionary inside the dictionary.
# my sys.argv isn't working in my situation due to my IDE (nor do I not know how it would work.... but yeah........... It's easy to code this.
import cPickle
#import sys
#
#arg = sys.argv[0]
print "Displaying dictionary for " + "MESTIT1"
hi = open("geneDictionary.pickle", "r")
hello = cPickle.load(hi)
print hello["MESTIT1"]
|
[
"# Jeremy Jao\r\n# University of Pittsburgh: DBMI\r\n# 6/18/2013\r\n#\r\n# This is the thing that returns the dictionary of the key. we can edit more code to return different values in the keys (gene) in each dictionary inside the dictionary.\r\n# my sys.argv isn't working in my situation due to my IDE (nor do I not know how it would work.... but yeah........... It's easy to code this.\r\n\r\nimport cPickle\r\n#import sys\r\n#\r\n#arg = sys.argv[0]\r\n\r\nprint \"Displaying dictionary for \" + \"MESTIT1\"\r\n\r\nhi = open(\"geneDictionary.pickle\", \"r\")\r\n\r\nhello = cPickle.load(hi)\r\n\r\nprint hello[\"MESTIT1\"]"
] | true |
9,734 |
b8b20d6c977a6c1df6a592188c6e799f12da6a23
|
##########################################################################################
## Scene Classification ##
## Authors : Chris Andrew, Santhoshini Reddy, Nikath Yasmeen, Sai Hima, Sriya Ragini ##
################################################################### ##
## Description: This project was developed as part of the DIP course at IIIT Sri City ##
## All code is available for free usage for educational purposes ##
## Authors do not authorize commercial use of the source code ##
##########################################################################################
# The following module shuffles the data to enable 10 fold cross-validation analysis
################ Imports ################
from random import shuffle
################ Global ################
path = "data/"
filename = "data"
################ Source ################
# ------------------------------------
f = open(path+filename+".csv",'r')
data = list()
train_data = list()
train_class = list()
# ------------------------------------
for line in f:
l = line.strip()
l = l.split(',')
l = map(float , l)
data.append(l)
# ------------------------------------
f.close()
# ------------------------------------
for i in range(100):
shuffle(data)
# ------------------------------------
for l in data:
train_data.append(l[0:-1])
train_class.append(int(l[-1]))
# ------------------------------------
f = open(path+filename+"_r.csv",'w')
for i in range(len(train_data)):
for entry in train_data[i]:
f.write(str(entry)+',')
# ------------------------------------
f.write(str(train_class[i])+'\n')
# ------------------------------------
f.close()
# ------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------
|
[
"##########################################################################################\n## Scene Classification ##\n## Authors : Chris Andrew, Santhoshini Reddy, Nikath Yasmeen, Sai Hima, Sriya Ragini ##\n################################################################### ##\n## Description: This project was developed as part of the DIP course at IIIT Sri City ##\n## All code is available for free usage for educational purposes ##\n## Authors do not authorize commercial use of the source code ##\n##########################################################################################\n\n# The following module shuffles the data to enable 10 fold cross-validation analysis\n\n################ Imports ################\nfrom random import shuffle\n################ Global ################\npath = \"data/\"\nfilename = \"data\"\n################ Source ################\n# ------------------------------------\nf = open(path+filename+\".csv\",'r')\ndata = list()\ntrain_data = list()\ntrain_class = list()\n# ------------------------------------\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float , l)\n data.append(l)\n # ------------------------------------\nf.close()\n# ------------------------------------\nfor i in range(100):\n shuffle(data)\n# ------------------------------------\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\n# ------------------------------------\nf = open(path+filename+\"_r.csv\",'w')\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry)+',')\n # ------------------------------------\n f.write(str(train_class[i])+'\\n')\n # ------------------------------------\nf.close()\n# ------------------------------------------------------------------------------------------------------------------------------------------\n# ------------------------------------------------------------------------------------------------------------------------------------------\n# 
------------------------------------------------------------------------------------------------------------------------------------------\n",
"from random import shuffle\npath = 'data/'\nfilename = 'data'\nf = open(path + filename + '.csv', 'r')\ndata = list()\ntrain_data = list()\ntrain_class = list()\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float, l)\n data.append(l)\nf.close()\nfor i in range(100):\n shuffle(data)\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\nf = open(path + filename + '_r.csv', 'w')\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry) + ',')\n f.write(str(train_class[i]) + '\\n')\nf.close()\n",
"<import token>\npath = 'data/'\nfilename = 'data'\nf = open(path + filename + '.csv', 'r')\ndata = list()\ntrain_data = list()\ntrain_class = list()\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float, l)\n data.append(l)\nf.close()\nfor i in range(100):\n shuffle(data)\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\nf = open(path + filename + '_r.csv', 'w')\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry) + ',')\n f.write(str(train_class[i]) + '\\n')\nf.close()\n",
"<import token>\n<assignment token>\nfor line in f:\n l = line.strip()\n l = l.split(',')\n l = map(float, l)\n data.append(l)\nf.close()\nfor i in range(100):\n shuffle(data)\nfor l in data:\n train_data.append(l[0:-1])\n train_class.append(int(l[-1]))\n<assignment token>\nfor i in range(len(train_data)):\n for entry in train_data[i]:\n f.write(str(entry) + ',')\n f.write(str(train_class[i]) + '\\n')\nf.close()\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,735 |
c77ca4aa720b172d75aff2ceda096a4969057a00
|
# coding=utf-8
# __author__ = 'liwenxuan'
import random
chars = "1234567890ABCDEF"
ids = ["{0}{1}{2}{3}".format(i, j, k, l) for i in chars for j in chars for k in chars for l in chars]
def random_peer_id(prefix="F"*8, server_id="0000"):
"""
用于生成随机的peer_id(后四位随机)
:param prefix: 生成的peer_id的前八位, 测试用prefix为"FFFFFFFF"
:param server_id: 区分不同server的标识, 不区分server时, server_id为"0000"
:return:
"""
assert len(str(prefix)) == 8 and len(str(server_id)) == 4
return str(prefix) + str(server_id) + "0"*16 + random.choice(ids) # length: 8+4+16+4 = 32
def random_file_id(file_id_prefix="F"*8, server_id="0000"):
"""
用于生成随机的file_id(后四位随机)
:param file_id_prefix: 生成的file_id的前八位, 测试用prefix为"FFFFFFFF"
:param server_id: 区分不同server的标识, 不区分server时, server_id为"0000"
:return:
"""
assert len(str(file_id_prefix)) <= 8 and len(str(server_id)) == 4
return str(file_id_prefix).ljust(8, "F") + str(server_id) + "F"*16 + random.choice(ids) # length: 8+4+16+4 = 32
if __name__ == "__main__":
pass
print "peer_id", random_peer_id()
print "file_id", random_file_id()
|
[
"# coding=utf-8\n# __author__ = 'liwenxuan'\n\nimport random\n\nchars = \"1234567890ABCDEF\"\nids = [\"{0}{1}{2}{3}\".format(i, j, k, l) for i in chars for j in chars for k in chars for l in chars]\n\n\ndef random_peer_id(prefix=\"F\"*8, server_id=\"0000\"):\n \"\"\"\n 用于生成随机的peer_id(后四位随机)\n :param prefix: 生成的peer_id的前八位, 测试用prefix为\"FFFFFFFF\"\n :param server_id: 区分不同server的标识, 不区分server时, server_id为\"0000\"\n :return:\n \"\"\"\n assert len(str(prefix)) == 8 and len(str(server_id)) == 4\n return str(prefix) + str(server_id) + \"0\"*16 + random.choice(ids) # length: 8+4+16+4 = 32\n\n\ndef random_file_id(file_id_prefix=\"F\"*8, server_id=\"0000\"):\n \"\"\"\n 用于生成随机的file_id(后四位随机)\n :param file_id_prefix: 生成的file_id的前八位, 测试用prefix为\"FFFFFFFF\"\n :param server_id: 区分不同server的标识, 不区分server时, server_id为\"0000\"\n :return:\n \"\"\"\n assert len(str(file_id_prefix)) <= 8 and len(str(server_id)) == 4\n return str(file_id_prefix).ljust(8, \"F\") + str(server_id) + \"F\"*16 + random.choice(ids) # length: 8+4+16+4 = 32\n\n\nif __name__ == \"__main__\":\n pass\n print \"peer_id\", random_peer_id()\n print \"file_id\", random_file_id()\n\n"
] | true |
9,736 |
972c479ea40232e14fbf678ca2ccf9716e473fe8
|
from rest_framework import serializers
from .models import data
from django.contrib.auth.models import User
class dataSerializer(serializers.ModelSerializer):
class Meta:
model = data
fields = ['id','task','duedate','person','done', 'task_user']
class userSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ['username', 'password', 'email']
|
[
"from rest_framework import serializers\n\nfrom .models import data\nfrom django.contrib.auth.models import User\n\nclass dataSerializer(serializers.ModelSerializer):\n class Meta:\n model = data\n fields = ['id','task','duedate','person','done', 'task_user']\n\nclass userSerializer(serializers.ModelSerializer):\n class Meta:\n model = User\n fields = ['username', 'password', 'email']",
"from rest_framework import serializers\nfrom .models import data\nfrom django.contrib.auth.models import User\n\n\nclass dataSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = data\n fields = ['id', 'task', 'duedate', 'person', 'done', 'task_user']\n\n\nclass userSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n fields = ['username', 'password', 'email']\n",
"<import token>\n\n\nclass dataSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = data\n fields = ['id', 'task', 'duedate', 'person', 'done', 'task_user']\n\n\nclass userSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n fields = ['username', 'password', 'email']\n",
"<import token>\n<class token>\n\n\nclass userSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = User\n fields = ['username', 'password', 'email']\n",
"<import token>\n<class token>\n<class token>\n"
] | false |
9,737 |
0d07ad60c58828ce19153063fb5d7d80135cb9ec
|
from django.http import HttpResponse
from django.shortcuts import render
from dashboard.models import Farmer
import random, json, requests
from django.core import serializers
from collections import namedtuple
def sendSMS(message):
if message:
assert isinstance(message, (str, unicode))
payload = {"From":"18126524546", "To":"+18126524546", "Body":message}
else:
payload = {"From":"18126524546", "To":"+18126524546", "Body":"we curlin"}
r = requests.post("https://api.twilio.com/2010-04-01/Accounts/AC2538516e85acddb63169a9c56019a68a/Messages",
auth=('AC2538516e85acddb63169a9c56019a68a','170945ab2aed0d2ec992a22e9fa41ca4'),
data=payload)
print r.text
def jsonify(content):
response = serializers.serialize('json', content);
return HttpResponse(response, mimetype='application/json')
def getLatestSMS(request):
return jsonify(str(getTwilioSMSData()))
def getTwilioSMSData(request):
r = requests.get('https://api.twilio.com/2010-04-01/Accounts/AC2538516e85acddb63169a9c56019a68a/Messages.json', auth=('AC2538516e85acddb63169a9c56019a68a', '170945ab2aed0d2ec992a22e9fa41ca4'))
all_messages = []
for item in r.json()['messages']:
all_messages.append(SMS(
phone = item['from'],
body = item['body'],
direction = item['direction'], #inbound or outbound
date_created = item['date_created'],
sid = item['sid']
))
last_SMS_id = all_messages[0].sid
farmer = matchPhoneToFarmer(all_messages[0].phone[1:])
return str({'farmer':farmer, 'text':all_messages[0]})
def matchPhoneToFarmer(phone):
print "PHONE: ", phone
for farmer in sample_farmers:
if phone==farmer.phone:
return farmer
return None
def generateSampleData():
sample_names = ("Bob", "Dan", "Chloe", "Lyra", "Dev", "Eric")
sample_land_area = (100, 140, 120, 30, 10, 1500)
sample_phone = ("17792329691","17792329696","17792329691","17792329691","17792329691","17792329691")
sample_lat = (11.1 + random.random()/15,
11.1 + random.random()/15,
11.1 + random.random()/15,
11.1 + random.random()/15,
11.1 + random.random()/15,
11.1 + random.random()/15)
sample_long = (79.646 + random.random()/15,
79.646 + random.random()/15,
79.646 + random.random()/15,
79.646 + random.random()/15,
79.646 + random.random()/15,
79.646)
sample_diseased = (True, False, False, False, True, True)
for i in range(6):
name=sample_names[i]
land_area=sample_land_area[i],
phone = sample_phone[i]
latitude=sample_lat[i],
longitude=sample_long[i],
is_diseased=sample_diseased[i]
sample_farmers.append(Farmer(name=name, land_area=land_area, phone=phone,
latitude=latitude, longitude=longitude,
is_diseased=is_diseased))
def returnFarmerDataJSON(request):
data = []
for item in sample_farmers:
data.append(str(item))
response = serializers.serialize('json', sample_farmers);
return HttpResponse(response, mimetype='application/json')
def dashboard(request):
# sendSMS("yo what's up")
print getTwilioSMSData(request)
context = {}
return render(request, 'dashboard/dashboard.html', context)
sample_farmers = []
generateSampleData()
SMS = namedtuple('SMS', ['phone', 'body', 'direction', 'date_created', 'sid'])
last_SMS_id = ""
|
[
"from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom dashboard.models import Farmer\nimport random, json, requests\nfrom django.core import serializers\n\nfrom collections import namedtuple\n\ndef sendSMS(message):\n if message:\n assert isinstance(message, (str, unicode))\n payload = {\"From\":\"18126524546\", \"To\":\"+18126524546\", \"Body\":message}\n else:\n payload = {\"From\":\"18126524546\", \"To\":\"+18126524546\", \"Body\":\"we curlin\"}\n r = requests.post(\"https://api.twilio.com/2010-04-01/Accounts/AC2538516e85acddb63169a9c56019a68a/Messages\", \n auth=('AC2538516e85acddb63169a9c56019a68a','170945ab2aed0d2ec992a22e9fa41ca4'), \n data=payload)\n print r.text\n\ndef jsonify(content):\n response = serializers.serialize('json', content);\n return HttpResponse(response, mimetype='application/json')\n\ndef getLatestSMS(request):\n return jsonify(str(getTwilioSMSData()))\n\ndef getTwilioSMSData(request):\n r = requests.get('https://api.twilio.com/2010-04-01/Accounts/AC2538516e85acddb63169a9c56019a68a/Messages.json', auth=('AC2538516e85acddb63169a9c56019a68a', '170945ab2aed0d2ec992a22e9fa41ca4'))\n all_messages = []\n for item in r.json()['messages']:\n all_messages.append(SMS(\n phone = item['from'],\n body = item['body'],\n direction = item['direction'], #inbound or outbound\n date_created = item['date_created'],\n sid = item['sid']\n ))\n last_SMS_id = all_messages[0].sid\n farmer = matchPhoneToFarmer(all_messages[0].phone[1:])\n return str({'farmer':farmer, 'text':all_messages[0]})\n\n\ndef matchPhoneToFarmer(phone):\n print \"PHONE: \", phone\n for farmer in sample_farmers:\n if phone==farmer.phone:\n return farmer\n return None\n\n\ndef generateSampleData():\n sample_names = (\"Bob\", \"Dan\", \"Chloe\", \"Lyra\", \"Dev\", \"Eric\")\n sample_land_area = (100, 140, 120, 30, 10, 1500)\n sample_phone = (\"17792329691\",\"17792329696\",\"17792329691\",\"17792329691\",\"17792329691\",\"17792329691\")\n sample_lat = (11.1 + 
random.random()/15,\n 11.1 + random.random()/15,\n 11.1 + random.random()/15,\n 11.1 + random.random()/15,\n 11.1 + random.random()/15,\n 11.1 + random.random()/15)\n sample_long = (79.646 + random.random()/15,\n 79.646 + random.random()/15,\n 79.646 + random.random()/15,\n 79.646 + random.random()/15,\n 79.646 + random.random()/15,\n 79.646)\n sample_diseased = (True, False, False, False, True, True)\n\n for i in range(6):\n name=sample_names[i]\n land_area=sample_land_area[i],\n phone = sample_phone[i]\n latitude=sample_lat[i], \n longitude=sample_long[i],\n is_diseased=sample_diseased[i]\n sample_farmers.append(Farmer(name=name, land_area=land_area, phone=phone,\n latitude=latitude, longitude=longitude,\n is_diseased=is_diseased))\n\ndef returnFarmerDataJSON(request):\n data = []\n for item in sample_farmers:\n data.append(str(item))\n response = serializers.serialize('json', sample_farmers);\n return HttpResponse(response, mimetype='application/json')\n\ndef dashboard(request):\n # sendSMS(\"yo what's up\")\n print getTwilioSMSData(request)\n context = {}\n return render(request, 'dashboard/dashboard.html', context)\n\nsample_farmers = []\ngenerateSampleData()\nSMS = namedtuple('SMS', ['phone', 'body', 'direction', 'date_created', 'sid'])\nlast_SMS_id = \"\"\n\n"
] | true |
9,738 |
a5b74c31aed103b55404afc538af60c3eb18cb1b
|
"""TcEx Framework Key Value Redis Module"""
class KeyValueRedis:
"""TcEx Key Value Redis Module.
Args:
context (str): The Redis context (hash) for hashed based operations.
redis_client (redis.Client): An instance of redis client.
"""
def __init__(self, context, redis_client):
"""Initialize the Class properties."""
self._context = context
self._redis_client = redis_client
@property
def context(self):
"""Return the current context."""
return self._context
@context.setter
def context(self, context):
"""Set or update the current context."""
self._context = context
def create(self, key, value):
"""Create key/value pair in Redis.
Args:
key (str): The field name (key) for the kv pair in Redis.
value (any): The value for the kv pair in Redis.
Returns:
str: The response from Redis.
"""
return self._redis_client.hset(self.context, key, value)
def delete(self, key):
"""Alias for hdel method.
Args:
key (str): The field name (key) for the kv pair in Redis.
Returns:
str: The response from Redis.
"""
return self._redis_client.hdel(self.context, key)
def hgetall(self):
"""Read data from Redis for the current context.
Returns:
list: The response data from Redis.
"""
return self._redis_client.hgetall(self.context)
def read(self, key):
"""Read data from Redis for the provided key.
Returns:
str: The response data from Redis.
"""
value = self._redis_client.hget(self.context, key)
# convert retrieved bytes to string
if isinstance(value, bytes):
value = value.decode('utf-8')
return value
|
[
"\"\"\"TcEx Framework Key Value Redis Module\"\"\"\n\n\nclass KeyValueRedis:\n \"\"\"TcEx Key Value Redis Module.\n\n Args:\n context (str): The Redis context (hash) for hashed based operations.\n redis_client (redis.Client): An instance of redis client.\n \"\"\"\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n\n @property\n def context(self):\n \"\"\"Return the current context.\"\"\"\n return self._context\n\n @context.setter\n def context(self, context):\n \"\"\"Set or update the current context.\"\"\"\n self._context = context\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n\n def delete(self, key):\n \"\"\"Alias for hdel method.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hdel(self.context, key)\n\n def hgetall(self):\n \"\"\"Read data from Redis for the current context.\n\n Returns:\n list: The response data from Redis.\n \"\"\"\n return self._redis_client.hgetall(self.context)\n\n def read(self, key):\n \"\"\"Read data from Redis for the provided key.\n\n Returns:\n str: The response data from Redis.\n \"\"\"\n value = self._redis_client.hget(self.context, key)\n # convert retrieved bytes to string\n if isinstance(value, bytes):\n value = value.decode('utf-8')\n return value\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n \"\"\"TcEx Key Value Redis Module.\n\n Args:\n context (str): The Redis context (hash) for hashed based operations.\n redis_client (redis.Client): An instance of redis client.\n \"\"\"\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n\n @property\n def context(self):\n \"\"\"Return the current context.\"\"\"\n return self._context\n\n @context.setter\n def context(self, context):\n \"\"\"Set or update the current context.\"\"\"\n self._context = context\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n\n def delete(self, key):\n \"\"\"Alias for hdel method.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hdel(self.context, key)\n\n def hgetall(self):\n \"\"\"Read data from Redis for the current context.\n\n Returns:\n list: The response data from Redis.\n \"\"\"\n return self._redis_client.hgetall(self.context)\n\n def read(self, key):\n \"\"\"Read data from Redis for the provided key.\n\n Returns:\n str: The response data from Redis.\n \"\"\"\n value = self._redis_client.hget(self.context, key)\n if isinstance(value, bytes):\n value = value.decode('utf-8')\n return value\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n\n @property\n def context(self):\n \"\"\"Return the current context.\"\"\"\n return self._context\n\n @context.setter\n def context(self, context):\n \"\"\"Set or update the current context.\"\"\"\n self._context = context\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n\n def delete(self, key):\n \"\"\"Alias for hdel method.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hdel(self.context, key)\n\n def hgetall(self):\n \"\"\"Read data from Redis for the current context.\n\n Returns:\n list: The response data from Redis.\n \"\"\"\n return self._redis_client.hgetall(self.context)\n\n def read(self, key):\n \"\"\"Read data from Redis for the provided key.\n\n Returns:\n str: The response data from Redis.\n \"\"\"\n value = self._redis_client.hget(self.context, key)\n if isinstance(value, bytes):\n value = value.decode('utf-8')\n return value\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n <function token>\n\n @context.setter\n def context(self, context):\n \"\"\"Set or update the current context.\"\"\"\n self._context = context\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n\n def delete(self, key):\n \"\"\"Alias for hdel method.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hdel(self.context, key)\n\n def hgetall(self):\n \"\"\"Read data from Redis for the current context.\n\n Returns:\n list: The response data from Redis.\n \"\"\"\n return self._redis_client.hgetall(self.context)\n\n def read(self, key):\n \"\"\"Read data from Redis for the provided key.\n\n Returns:\n str: The response data from Redis.\n \"\"\"\n value = self._redis_client.hget(self.context, key)\n if isinstance(value, bytes):\n value = value.decode('utf-8')\n return value\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n <function token>\n\n @context.setter\n def context(self, context):\n \"\"\"Set or update the current context.\"\"\"\n self._context = context\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n\n def delete(self, key):\n \"\"\"Alias for hdel method.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hdel(self.context, key)\n\n def hgetall(self):\n \"\"\"Read data from Redis for the current context.\n\n Returns:\n list: The response data from Redis.\n \"\"\"\n return self._redis_client.hgetall(self.context)\n <function token>\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n <function token>\n\n @context.setter\n def context(self, context):\n \"\"\"Set or update the current context.\"\"\"\n self._context = context\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n\n def delete(self, key):\n \"\"\"Alias for hdel method.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hdel(self.context, key)\n <function token>\n <function token>\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n <function token>\n <function token>\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n\n def delete(self, key):\n \"\"\"Alias for hdel method.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hdel(self.context, key)\n <function token>\n <function token>\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n <function token>\n <function token>\n\n def create(self, key, value):\n \"\"\"Create key/value pair in Redis.\n\n Args:\n key (str): The field name (key) for the kv pair in Redis.\n value (any): The value for the kv pair in Redis.\n\n Returns:\n str: The response from Redis.\n \"\"\"\n return self._redis_client.hset(self.context, key, value)\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n\n def __init__(self, context, redis_client):\n \"\"\"Initialize the Class properties.\"\"\"\n self._context = context\n self._redis_client = redis_client\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n\n\nclass KeyValueRedis:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<class token>\n"
] | false |
9,739 |
a67612e8301728d1fb366d7c8909fa830f04bf45
|
#Max Low
#9-25-17
#quiz2.py -- numbers , bigger smaller same, divisible by 3, product and correct person
numone = int(input('Enter a number: '))
numtwo = int(input('Enter a 2nd number: '))
if numone > numtwo:
print('The first number is bigger')
elif numtwo > numone:
print('The second number is bigger')
else:
print('The numbers are the same')
if numone % 3 == 0 and numtwo % 3 == 0:
print('They are both divisible by 3')
elif numone % 3 == 0:
print('Only the first number is divisible by three')
elif numtwo % 3 == 0:
print('Only the second number is divisible by three')
else:
print('Neither number is divisible by 3')
product = int(input('What is the product of your two numbers?: '))
if product == numone*numtwo:
print('correct')
else:
print('incorrect')
|
[
"#Max Low\n#9-25-17\n#quiz2.py -- numbers , bigger smaller same, divisible by 3, product and correct person\n\nnumone = int(input('Enter a number: '))\nnumtwo = int(input('Enter a 2nd number: '))\n\nif numone > numtwo:\n print('The first number is bigger')\nelif numtwo > numone:\n print('The second number is bigger')\nelse:\n print('The numbers are the same')\n\n \nif numone % 3 == 0 and numtwo % 3 == 0:\n print('They are both divisible by 3')\nelif numone % 3 == 0:\n print('Only the first number is divisible by three')\nelif numtwo % 3 == 0:\n print('Only the second number is divisible by three')\nelse:\n print('Neither number is divisible by 3')\n\nproduct = int(input('What is the product of your two numbers?: '))\nif product == numone*numtwo:\n print('correct')\nelse:\n print('incorrect')",
"numone = int(input('Enter a number: '))\nnumtwo = int(input('Enter a 2nd number: '))\nif numone > numtwo:\n print('The first number is bigger')\nelif numtwo > numone:\n print('The second number is bigger')\nelse:\n print('The numbers are the same')\nif numone % 3 == 0 and numtwo % 3 == 0:\n print('They are both divisible by 3')\nelif numone % 3 == 0:\n print('Only the first number is divisible by three')\nelif numtwo % 3 == 0:\n print('Only the second number is divisible by three')\nelse:\n print('Neither number is divisible by 3')\nproduct = int(input('What is the product of your two numbers?: '))\nif product == numone * numtwo:\n print('correct')\nelse:\n print('incorrect')\n",
"<assignment token>\nif numone > numtwo:\n print('The first number is bigger')\nelif numtwo > numone:\n print('The second number is bigger')\nelse:\n print('The numbers are the same')\nif numone % 3 == 0 and numtwo % 3 == 0:\n print('They are both divisible by 3')\nelif numone % 3 == 0:\n print('Only the first number is divisible by three')\nelif numtwo % 3 == 0:\n print('Only the second number is divisible by three')\nelse:\n print('Neither number is divisible by 3')\n<assignment token>\nif product == numone * numtwo:\n print('correct')\nelse:\n print('incorrect')\n",
"<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,740 |
cd8d95e2bf433020db2db06a21263f75e3f81331
|
#!/bin/python
"""
len()
lower()
upper()
str()
"""
parrot = "Norwegian Blue"
print len(parrot)
|
[
"#!/bin/python\n\n\"\"\"\nlen()\nlower()\nupper()\nstr()\n\"\"\"\n\nparrot = \"Norwegian Blue\"\nprint len(parrot)\n"
] | true |
9,741 |
2b8b5b893d61d11d2795f5be96fde759256a15e8
|
"""
This is the main script
"""
import datetime
import sqlite3
from sqlite3 import Error
import nltk.sentiment
from chatterbot import ChatBot
from pythonosc import udp_client
def _create_connection(db_file):
""" Create a database connection to the SQLite database """
try:
conn = sqlite3.connect(db_file)
cur = conn.cursor()
# Create a new SQLite table
cur.execute("CREATE TABLE {tn} ({r1}, {r2}, {time} {ft})"
.format(tn=TABLE_NAME, r1=INPUT_COLUMN, r2=OUTPUT_COLUMN,
time='time', ft='TEXT'))
except Error as err:
print(err)
finally:
conn.commit()
conn.close()
def _log_conversation(db_file, line):
""" Log conversation in SQLite database """
try:
conn = sqlite3.connect(db_file)
cur = conn.cursor()
cur.execute("""INSERT INTO {tn} ({c1}, {c2}, {time}) VALUES ("{v1}", "{v2}", "{now}")""".
format(tn=TABLE_NAME, c1=INPUT_COLUMN, c2=OUTPUT_COLUMN, time='time',
v1=' '.join(line.keys()), v2=' '.join(line.values()),
now=str(datetime.datetime.now())))
conn.commit()
except Error as err:
print(err)
finally:
conn.close()
def main(text):
"""This is the main function to run the CHATBOT, analyse
the responses with nltk and send OSC messages to Pure Data.
"""
# Get CHATBOT response from the user input.
bot_response = CHATBOT.get_response(text).text
print(bot_response)
# Get polarity score from CHATBOT response.
analysis = VADER_ANALYZER.polarity_scores(text)
# Change polarity score relatively to a audible frequency.
freq = (analysis['compound'] - -1) / (1 - -1) * (800 - 200) + 200
# Send OSC message, to be listened to by pd.
CLIENT.send_message("/filter", freq)
# Log conversation.
exchange = {text: bot_response}
_log_conversation("conversation.db", exchange)
if __name__ == '__main__':
# Set up database
TABLE_NAME = 'conversation_log'
INPUT_COLUMN = 'input_column'
OUTPUT_COLUMN = 'output_column'
CONVERSATION_DB = "conversation.db"
_create_connection(CONVERSATION_DB)
# Set up chatbot.
CHATBOT = ChatBot(
'Sentiment Music Bot',
trainer='chatterbot.trainers.ChatterBotCorpusTrainer')
# Train based on the english corpus.
CHATBOT.train("chatterbot.corpus.english")
# Download lexicon for nltk.
nltk.download('vader_lexicon')
# Set up sentiment analyzer.
VADER_ANALYZER = nltk.sentiment.vader.SentimentIntensityAnalyzer()
# Set up OSC client.
IP = 'localhost'
PORT = 9000
CLIENT = udp_client.SimpleUDPClient(IP, PORT)
# Run chatbot.
while True:
USER_RESPONSE = input("Talk ('exit' to exit): ")
if USER_RESPONSE == 'exit': # Exit on 'exit' string.
break
else:
main(USER_RESPONSE)
|
[
"\"\"\"\nThis is the main script\n\"\"\"\n\nimport datetime\nimport sqlite3\nfrom sqlite3 import Error\nimport nltk.sentiment\nfrom chatterbot import ChatBot\nfrom pythonosc import udp_client\n\n\ndef _create_connection(db_file):\n \"\"\" Create a database connection to the SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n\n # Create a new SQLite table\n cur.execute(\"CREATE TABLE {tn} ({r1}, {r2}, {time} {ft})\"\n .format(tn=TABLE_NAME, r1=INPUT_COLUMN, r2=OUTPUT_COLUMN,\n time='time', ft='TEXT'))\n\n except Error as err:\n print(err)\n\n finally:\n conn.commit()\n conn.close()\n\n\ndef _log_conversation(db_file, line):\n \"\"\" Log conversation in SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute(\"\"\"INSERT INTO {tn} ({c1}, {c2}, {time}) VALUES (\"{v1}\", \"{v2}\", \"{now}\")\"\"\".\n format(tn=TABLE_NAME, c1=INPUT_COLUMN, c2=OUTPUT_COLUMN, time='time',\n v1=' '.join(line.keys()), v2=' '.join(line.values()),\n now=str(datetime.datetime.now())))\n conn.commit()\n\n except Error as err:\n print(err)\n\n finally:\n conn.close()\n\n\ndef main(text):\n \"\"\"This is the main function to run the CHATBOT, analyse\n the responses with nltk and send OSC messages to Pure Data.\n \"\"\"\n\n # Get CHATBOT response from the user input.\n bot_response = CHATBOT.get_response(text).text\n print(bot_response)\n\n # Get polarity score from CHATBOT response.\n analysis = VADER_ANALYZER.polarity_scores(text)\n\n # Change polarity score relatively to a audible frequency.\n freq = (analysis['compound'] - -1) / (1 - -1) * (800 - 200) + 200\n\n # Send OSC message, to be listened to by pd.\n CLIENT.send_message(\"/filter\", freq)\n\n # Log conversation.\n exchange = {text: bot_response}\n _log_conversation(\"conversation.db\", exchange)\n\n\nif __name__ == '__main__':\n\n # Set up database\n TABLE_NAME = 'conversation_log'\n INPUT_COLUMN = 'input_column'\n OUTPUT_COLUMN = 'output_column'\n 
CONVERSATION_DB = \"conversation.db\"\n _create_connection(CONVERSATION_DB)\n\n # Set up chatbot.\n CHATBOT = ChatBot(\n 'Sentiment Music Bot',\n trainer='chatterbot.trainers.ChatterBotCorpusTrainer')\n\n # Train based on the english corpus.\n CHATBOT.train(\"chatterbot.corpus.english\")\n\n # Download lexicon for nltk.\n nltk.download('vader_lexicon')\n\n # Set up sentiment analyzer.\n VADER_ANALYZER = nltk.sentiment.vader.SentimentIntensityAnalyzer()\n\n # Set up OSC client.\n IP = 'localhost'\n PORT = 9000\n CLIENT = udp_client.SimpleUDPClient(IP, PORT)\n\n # Run chatbot.\n while True:\n USER_RESPONSE = input(\"Talk ('exit' to exit): \")\n if USER_RESPONSE == 'exit': # Exit on 'exit' string.\n break\n else:\n main(USER_RESPONSE)\n",
"<docstring token>\nimport datetime\nimport sqlite3\nfrom sqlite3 import Error\nimport nltk.sentiment\nfrom chatterbot import ChatBot\nfrom pythonosc import udp_client\n\n\ndef _create_connection(db_file):\n \"\"\" Create a database connection to the SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute('CREATE TABLE {tn} ({r1}, {r2}, {time} {ft})'.format(tn\n =TABLE_NAME, r1=INPUT_COLUMN, r2=OUTPUT_COLUMN, time='time', ft\n ='TEXT'))\n except Error as err:\n print(err)\n finally:\n conn.commit()\n conn.close()\n\n\ndef _log_conversation(db_file, line):\n \"\"\" Log conversation in SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute(\n 'INSERT INTO {tn} ({c1}, {c2}, {time}) VALUES (\"{v1}\", \"{v2}\", \"{now}\")'\n .format(tn=TABLE_NAME, c1=INPUT_COLUMN, c2=OUTPUT_COLUMN, time=\n 'time', v1=' '.join(line.keys()), v2=' '.join(line.values()),\n now=str(datetime.datetime.now())))\n conn.commit()\n except Error as err:\n print(err)\n finally:\n conn.close()\n\n\ndef main(text):\n \"\"\"This is the main function to run the CHATBOT, analyse\n the responses with nltk and send OSC messages to Pure Data.\n \"\"\"\n bot_response = CHATBOT.get_response(text).text\n print(bot_response)\n analysis = VADER_ANALYZER.polarity_scores(text)\n freq = (analysis['compound'] - -1) / (1 - -1) * (800 - 200) + 200\n CLIENT.send_message('/filter', freq)\n exchange = {text: bot_response}\n _log_conversation('conversation.db', exchange)\n\n\nif __name__ == '__main__':\n TABLE_NAME = 'conversation_log'\n INPUT_COLUMN = 'input_column'\n OUTPUT_COLUMN = 'output_column'\n CONVERSATION_DB = 'conversation.db'\n _create_connection(CONVERSATION_DB)\n CHATBOT = ChatBot('Sentiment Music Bot', trainer=\n 'chatterbot.trainers.ChatterBotCorpusTrainer')\n CHATBOT.train('chatterbot.corpus.english')\n nltk.download('vader_lexicon')\n VADER_ANALYZER = nltk.sentiment.vader.SentimentIntensityAnalyzer()\n IP = 
'localhost'\n PORT = 9000\n CLIENT = udp_client.SimpleUDPClient(IP, PORT)\n while True:\n USER_RESPONSE = input(\"Talk ('exit' to exit): \")\n if USER_RESPONSE == 'exit':\n break\n else:\n main(USER_RESPONSE)\n",
"<docstring token>\n<import token>\n\n\ndef _create_connection(db_file):\n \"\"\" Create a database connection to the SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute('CREATE TABLE {tn} ({r1}, {r2}, {time} {ft})'.format(tn\n =TABLE_NAME, r1=INPUT_COLUMN, r2=OUTPUT_COLUMN, time='time', ft\n ='TEXT'))\n except Error as err:\n print(err)\n finally:\n conn.commit()\n conn.close()\n\n\ndef _log_conversation(db_file, line):\n \"\"\" Log conversation in SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute(\n 'INSERT INTO {tn} ({c1}, {c2}, {time}) VALUES (\"{v1}\", \"{v2}\", \"{now}\")'\n .format(tn=TABLE_NAME, c1=INPUT_COLUMN, c2=OUTPUT_COLUMN, time=\n 'time', v1=' '.join(line.keys()), v2=' '.join(line.values()),\n now=str(datetime.datetime.now())))\n conn.commit()\n except Error as err:\n print(err)\n finally:\n conn.close()\n\n\ndef main(text):\n \"\"\"This is the main function to run the CHATBOT, analyse\n the responses with nltk and send OSC messages to Pure Data.\n \"\"\"\n bot_response = CHATBOT.get_response(text).text\n print(bot_response)\n analysis = VADER_ANALYZER.polarity_scores(text)\n freq = (analysis['compound'] - -1) / (1 - -1) * (800 - 200) + 200\n CLIENT.send_message('/filter', freq)\n exchange = {text: bot_response}\n _log_conversation('conversation.db', exchange)\n\n\nif __name__ == '__main__':\n TABLE_NAME = 'conversation_log'\n INPUT_COLUMN = 'input_column'\n OUTPUT_COLUMN = 'output_column'\n CONVERSATION_DB = 'conversation.db'\n _create_connection(CONVERSATION_DB)\n CHATBOT = ChatBot('Sentiment Music Bot', trainer=\n 'chatterbot.trainers.ChatterBotCorpusTrainer')\n CHATBOT.train('chatterbot.corpus.english')\n nltk.download('vader_lexicon')\n VADER_ANALYZER = nltk.sentiment.vader.SentimentIntensityAnalyzer()\n IP = 'localhost'\n PORT = 9000\n CLIENT = udp_client.SimpleUDPClient(IP, PORT)\n while True:\n USER_RESPONSE = input(\"Talk ('exit' to exit): \")\n 
if USER_RESPONSE == 'exit':\n break\n else:\n main(USER_RESPONSE)\n",
"<docstring token>\n<import token>\n\n\ndef _create_connection(db_file):\n \"\"\" Create a database connection to the SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute('CREATE TABLE {tn} ({r1}, {r2}, {time} {ft})'.format(tn\n =TABLE_NAME, r1=INPUT_COLUMN, r2=OUTPUT_COLUMN, time='time', ft\n ='TEXT'))\n except Error as err:\n print(err)\n finally:\n conn.commit()\n conn.close()\n\n\ndef _log_conversation(db_file, line):\n \"\"\" Log conversation in SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute(\n 'INSERT INTO {tn} ({c1}, {c2}, {time}) VALUES (\"{v1}\", \"{v2}\", \"{now}\")'\n .format(tn=TABLE_NAME, c1=INPUT_COLUMN, c2=OUTPUT_COLUMN, time=\n 'time', v1=' '.join(line.keys()), v2=' '.join(line.values()),\n now=str(datetime.datetime.now())))\n conn.commit()\n except Error as err:\n print(err)\n finally:\n conn.close()\n\n\ndef main(text):\n \"\"\"This is the main function to run the CHATBOT, analyse\n the responses with nltk and send OSC messages to Pure Data.\n \"\"\"\n bot_response = CHATBOT.get_response(text).text\n print(bot_response)\n analysis = VADER_ANALYZER.polarity_scores(text)\n freq = (analysis['compound'] - -1) / (1 - -1) * (800 - 200) + 200\n CLIENT.send_message('/filter', freq)\n exchange = {text: bot_response}\n _log_conversation('conversation.db', exchange)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n\n\ndef _create_connection(db_file):\n \"\"\" Create a database connection to the SQLite database \"\"\"\n try:\n conn = sqlite3.connect(db_file)\n cur = conn.cursor()\n cur.execute('CREATE TABLE {tn} ({r1}, {r2}, {time} {ft})'.format(tn\n =TABLE_NAME, r1=INPUT_COLUMN, r2=OUTPUT_COLUMN, time='time', ft\n ='TEXT'))\n except Error as err:\n print(err)\n finally:\n conn.commit()\n conn.close()\n\n\n<function token>\n\n\ndef main(text):\n \"\"\"This is the main function to run the CHATBOT, analyse\n the responses with nltk and send OSC messages to Pure Data.\n \"\"\"\n bot_response = CHATBOT.get_response(text).text\n print(bot_response)\n analysis = VADER_ANALYZER.polarity_scores(text)\n freq = (analysis['compound'] - -1) / (1 - -1) * (800 - 200) + 200\n CLIENT.send_message('/filter', freq)\n exchange = {text: bot_response}\n _log_conversation('conversation.db', exchange)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<function token>\n<function token>\n\n\ndef main(text):\n \"\"\"This is the main function to run the CHATBOT, analyse\n the responses with nltk and send OSC messages to Pure Data.\n \"\"\"\n bot_response = CHATBOT.get_response(text).text\n print(bot_response)\n analysis = VADER_ANALYZER.polarity_scores(text)\n freq = (analysis['compound'] - -1) / (1 - -1) * (800 - 200) + 200\n CLIENT.send_message('/filter', freq)\n exchange = {text: bot_response}\n _log_conversation('conversation.db', exchange)\n\n\n<code token>\n",
"<docstring token>\n<import token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,742 |
a315d01f0fb16f0c74c447c07b76f33e6ff6427d
|
from auth_passwordreset_reset import auth_passwordreset_reset
from auth_register import auth_register
from data import *
import pytest
#invalid reset code
def test_auth_passwordreset_reset1():
#create a test account
register = auth_register("[email protected]", "Hello123", "First", "Last")
#call password reset request
auth_passwordreset_request("[email protected]")
#assuming that the code from the email was "WER123"
#this should not work as the code "ABS124" doesnt match "WER123"
with pytest.raises(ValueError, match='*Incorrect Reset Code*'):
auth_passwordreset_reset("ABS124", "SomePass")
#invalid password
def test_auth_passwordreset_reset2():
#create a test account
register = auth_register("[email protected]", "Hello123", "First", "Last")
#call password reset request
auth_passwordreset_request("[email protected]")
#assume that the code generated was "AUW624"
#these should not work as the new passowrd lengths are <5
with pytest.raises(ValueError, match='*Invalid Password Length*'):
auth_passwordreset_reset("AUW624", "")
auth_passwordreset_reset("AUW624", "nope")
#valid case
def test_auth_passwordreset_reset3():
#create a test account
register = auth_register("[email protected]", "Hello123", "First", "Last")
#call password reset request
auth_passwordreset_request("[email protected]")
#assume that the code generated was "AUW624"
auth_passwordreset_reset("AUW624", "Valispass12")
#test to see if password updated
assert new_user_password == "Valispass12"
#this sequence should successfully reset the password
|
[
"from auth_passwordreset_reset import auth_passwordreset_reset\nfrom auth_register import auth_register\nfrom data import *\nimport pytest\n\n\n#invalid reset code\ndef test_auth_passwordreset_reset1():\n \n #create a test account\n register = auth_register(\"[email protected]\", \"Hello123\", \"First\", \"Last\")\n \n #call password reset request\n auth_passwordreset_request(\"[email protected]\")\n \n #assuming that the code from the email was \"WER123\"\n \n #this should not work as the code \"ABS124\" doesnt match \"WER123\"\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset(\"ABS124\", \"SomePass\")\n \n#invalid password\ndef test_auth_passwordreset_reset2():\n\n #create a test account\n register = auth_register(\"[email protected]\", \"Hello123\", \"First\", \"Last\")\n \n #call password reset request\n auth_passwordreset_request(\"[email protected]\")\n \n #assume that the code generated was \"AUW624\"\n \n #these should not work as the new passowrd lengths are <5\n with pytest.raises(ValueError, match='*Invalid Password Length*'):\n auth_passwordreset_reset(\"AUW624\", \"\")\n auth_passwordreset_reset(\"AUW624\", \"nope\")\n \n#valid case\ndef test_auth_passwordreset_reset3():\n \n #create a test account\n register = auth_register(\"[email protected]\", \"Hello123\", \"First\", \"Last\")\n \n #call password reset request\n auth_passwordreset_request(\"[email protected]\")\n \n #assume that the code generated was \"AUW624\"\n auth_passwordreset_reset(\"AUW624\", \"Valispass12\") \n \n #test to see if password updated\n assert new_user_password == \"Valispass12\"\n #this sequence should successfully reset the password\n",
"from auth_passwordreset_reset import auth_passwordreset_reset\nfrom auth_register import auth_register\nfrom data import *\nimport pytest\n\n\ndef test_auth_passwordreset_reset1():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset('ABS124', 'SomePass')\n\n\ndef test_auth_passwordreset_reset2():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Invalid Password Length*'):\n auth_passwordreset_reset('AUW624', '')\n auth_passwordreset_reset('AUW624', 'nope')\n\n\ndef test_auth_passwordreset_reset3():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n auth_passwordreset_reset('AUW624', 'Valispass12')\n assert new_user_password == 'Valispass12'\n",
"<import token>\n\n\ndef test_auth_passwordreset_reset1():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset('ABS124', 'SomePass')\n\n\ndef test_auth_passwordreset_reset2():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Invalid Password Length*'):\n auth_passwordreset_reset('AUW624', '')\n auth_passwordreset_reset('AUW624', 'nope')\n\n\ndef test_auth_passwordreset_reset3():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n auth_passwordreset_reset('AUW624', 'Valispass12')\n assert new_user_password == 'Valispass12'\n",
"<import token>\n\n\ndef test_auth_passwordreset_reset1():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset('ABS124', 'SomePass')\n\n\n<function token>\n\n\ndef test_auth_passwordreset_reset3():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n auth_passwordreset_reset('AUW624', 'Valispass12')\n assert new_user_password == 'Valispass12'\n",
"<import token>\n\n\ndef test_auth_passwordreset_reset1():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset('ABS124', 'SomePass')\n\n\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n"
] | false |
9,743 |
75b1674066958a8fa28e74121a35d688bcc473d9
|
from odoo import models, fields, api, _
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = "sale.advance.payment.inv"
date_start_invoice_timesheet = fields.Date(
string='Start Date',
help="Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. "
"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will "
"be invoiced without distinction.", required=True)
date_end_invoice_timesheet = fields.Date(
string='End Date',
help="Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. "
"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will "
"be invoiced without distinction.", required=True)
|
[
"from odoo import models, fields, api, _\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n _inherit = \"sale.advance.payment.inv\"\n\n date_start_invoice_timesheet = fields.Date(\n string='Start Date',\n help=\"Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. \"\n \"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will \"\n \"be invoiced without distinction.\", required=True)\n date_end_invoice_timesheet = fields.Date(\n string='End Date',\n help=\"Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. \"\n \"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will \"\n \"be invoiced without distinction.\", required=True)\n",
"from odoo import models, fields, api, _\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n _inherit = 'sale.advance.payment.inv'\n date_start_invoice_timesheet = fields.Date(string='Start Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n date_end_invoice_timesheet = fields.Date(string='End Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n",
"<import token>\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n _inherit = 'sale.advance.payment.inv'\n date_start_invoice_timesheet = fields.Date(string='Start Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n date_end_invoice_timesheet = fields.Date(string='End Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n",
"<import token>\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n <assignment token>\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
9,744 |
a718d82713503c4ce3d94225ff0db04991ad4094
|
# Generated by Django 3.0 on 2020-05-04 16:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('game_skeleton', '0001_initial'),
('contenttypes', '0002_remove_content_type_name'),
('class_room', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserHero',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime_created', models.DateTimeField(auto_now=True)),
('datetime_edited', models.DateTimeField(auto_now_add=True)),
('datetime_finished', models.DateTimeField(blank=True, null=True)),
('capacity', models.FloatField()),
('wallet', models.DecimalField(decimal_places=4, default=0.0, max_digits=10)),
('hero_class', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='game_skeleton.HeroClass')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='heroes', to='class_room.User')),
],
),
migrations.CreateModel(
name='EventHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('object_id', models.PositiveIntegerField()),
('is_draft', models.BooleanField(default=False, help_text='Draft note does not participate in hero capacity calculation.')),
('datetime_created', models.DateTimeField(auto_now=True)),
('datetime_edited', models.DateTimeField(auto_now_add=True)),
('author', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='actions', to='class_room.User')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='events', to='class_room.User')),
],
options={
'verbose_name_plural': 'User`s history events',
},
),
]
|
[
"# Generated by Django 3.0 on 2020-05-04 16:15\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('game_skeleton', '0001_initial'),\n ('contenttypes', '0002_remove_content_type_name'),\n ('class_room', '0001_initial'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='UserHero',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('datetime_created', models.DateTimeField(auto_now=True)),\n ('datetime_edited', models.DateTimeField(auto_now_add=True)),\n ('datetime_finished', models.DateTimeField(blank=True, null=True)),\n ('capacity', models.FloatField()),\n ('wallet', models.DecimalField(decimal_places=4, default=0.0, max_digits=10)),\n ('hero_class', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='game_skeleton.HeroClass')),\n ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='heroes', to='class_room.User')),\n ],\n ),\n migrations.CreateModel(\n name='EventHistory',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('object_id', models.PositiveIntegerField()),\n ('is_draft', models.BooleanField(default=False, help_text='Draft note does not participate in hero capacity calculation.')),\n ('datetime_created', models.DateTimeField(auto_now=True)),\n ('datetime_edited', models.DateTimeField(auto_now_add=True)),\n ('author', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='actions', to='class_room.User')),\n ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='events', to='class_room.User')),\n ],\n options={\n 'verbose_name_plural': 'User`s history events',\n },\n ),\n ]\n",
"from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('game_skeleton', '0001_initial'), ('contenttypes',\n '0002_remove_content_type_name'), ('class_room', '0001_initial')]\n operations = [migrations.CreateModel(name='UserHero', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('datetime_created', models.\n DateTimeField(auto_now=True)), ('datetime_edited', models.\n DateTimeField(auto_now_add=True)), ('datetime_finished', models.\n DateTimeField(blank=True, null=True)), ('capacity', models.\n FloatField()), ('wallet', models.DecimalField(decimal_places=4,\n default=0.0, max_digits=10)), ('hero_class', models.OneToOneField(\n on_delete=django.db.models.deletion.CASCADE, to=\n 'game_skeleton.HeroClass')), ('user', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, related_name='heroes', to=\n 'class_room.User'))]), migrations.CreateModel(name='EventHistory',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('object_id', models.\n PositiveIntegerField()), ('is_draft', models.BooleanField(default=\n False, help_text=\n 'Draft note does not participate in hero capacity calculation.')),\n ('datetime_created', models.DateTimeField(auto_now=True)), (\n 'datetime_edited', models.DateTimeField(auto_now_add=True)), (\n 'author', models.OneToOneField(null=True, on_delete=django.db.\n models.deletion.SET_NULL, related_name='actions', to=\n 'class_room.User')), ('content_type', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.\n CASCADE, related_name='events', to='class_room.User'))], options={\n 'verbose_name_plural': 'User`s history events'})]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('game_skeleton', '0001_initial'), ('contenttypes',\n '0002_remove_content_type_name'), ('class_room', '0001_initial')]\n operations = [migrations.CreateModel(name='UserHero', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('datetime_created', models.\n DateTimeField(auto_now=True)), ('datetime_edited', models.\n DateTimeField(auto_now_add=True)), ('datetime_finished', models.\n DateTimeField(blank=True, null=True)), ('capacity', models.\n FloatField()), ('wallet', models.DecimalField(decimal_places=4,\n default=0.0, max_digits=10)), ('hero_class', models.OneToOneField(\n on_delete=django.db.models.deletion.CASCADE, to=\n 'game_skeleton.HeroClass')), ('user', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, related_name='heroes', to=\n 'class_room.User'))]), migrations.CreateModel(name='EventHistory',\n fields=[('id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('object_id', models.\n PositiveIntegerField()), ('is_draft', models.BooleanField(default=\n False, help_text=\n 'Draft note does not participate in hero capacity calculation.')),\n ('datetime_created', models.DateTimeField(auto_now=True)), (\n 'datetime_edited', models.DateTimeField(auto_now_add=True)), (\n 'author', models.OneToOneField(null=True, on_delete=django.db.\n models.deletion.SET_NULL, related_name='actions', to=\n 'class_room.User')), ('content_type', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.\n CASCADE, related_name='events', to='class_room.User'))], options={\n 'verbose_name_plural': 'User`s history events'})]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
9,745 |
afdb14d60374049753b3c980c717a13456c7ff5c
|
from django.contrib import admin
from django.urls import path
from .views import NewsCreateListView, NewsDetailGenericView
urlpatterns = [
path('news/', NewsCreateListView.as_view()),
path('news_detailed/<int:id>/', NewsDetailGenericView.as_view()),
]
|
[
"from django.contrib import admin\nfrom django.urls import path\nfrom .views import NewsCreateListView, NewsDetailGenericView\n\n\nurlpatterns = [\n path('news/', NewsCreateListView.as_view()),\n path('news_detailed/<int:id>/', NewsDetailGenericView.as_view()),\n\n]",
"from django.contrib import admin\nfrom django.urls import path\nfrom .views import NewsCreateListView, NewsDetailGenericView\nurlpatterns = [path('news/', NewsCreateListView.as_view()), path(\n 'news_detailed/<int:id>/', NewsDetailGenericView.as_view())]\n",
"<import token>\nurlpatterns = [path('news/', NewsCreateListView.as_view()), path(\n 'news_detailed/<int:id>/', NewsDetailGenericView.as_view())]\n",
"<import token>\n<assignment token>\n"
] | false |
9,746 |
cb6f68c8b8a6cead1d9fcd25fa2a4e60f7a8fb28
|
import math
def upsample1(d, p):
# 普通结界
assert 1 <= p <= 10
return d + p
def upsample2(d, p):
# 倍增结界
assert 2 <= p <= 3
return d * p
def downsample(d, p):
# 聚集结界
assert 2 <= p <= 10
return math.ceil(d / p)
# 初始化杀伤力范围
lethal_radius = 1
# 结界参数(z, p)
config = [(1, 6),
(2, 3),
(3, 3),
(2, 3),
(2, 3),
(3, 7)]
for i in range(int(input())):
z, p = list(map(int, input().strip().split()))
if z == 1:
lethal_radius = upsample1(lethal_radius, p)
if z == 2:
lethal_radius = upsample2(lethal_radius, p)
if z == 3:
lethal_radius = downsample(lethal_radius, p)
print(lethal_radius)
|
[
"import math\n\n\ndef upsample1(d, p):\n # 普通结界\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n # 倍增结界\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n # 聚集结界\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n# 初始化杀伤力范围\nlethal_radius = 1\n\n# 结界参数(z, p)\nconfig = [(1, 6),\n (2, 3),\n (3, 3),\n (2, 3),\n (2, 3),\n (3, 7)]\n\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n\n\n\n",
"import math\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\nlethal_radius = 1\nconfig = [(1, 6), (2, 3), (3, 3), (2, 3), (2, 3), (3, 7)]\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"<import token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\nlethal_radius = 1\nconfig = [(1, 6), (2, 3), (3, 3), (2, 3), (2, 3), (3, 7)]\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"<import token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n<assignment token>\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"<import token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\n<function token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
9,747 |
0972bd1241ad91f54f8dfde6327ee226c27bf2ca
|
from datetime import datetime
import time
from os import system
import RPi.GPIO as GPIO
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(21, GPIO.OUT) # este pin es de salida carro
GPIO.setup(26, GPIO.OUT) # este pin es de salida carro
GPIO.setup(19, GPIO.OUT) # este pin es de salida carro
GPIO.setup(13, GPIO.OUT) # este pin es de salida carro
GPIO.setup(6, GPIO.OUT) # este pin es de salida carro
GPIO.setup(5, GPIO.OUT) # este pin es de salida carro
GPIO.setup(11, GPIO.OUT) # este pin es de salida carro
GPIO.setup(20, GPIO.IN) #Este pin es una entrada carro pequeno
GPIO.setup(16, GPIO.IN) #Este pin es una entrada carro grande
PATH_CRED = '/home/pi/Desktop/cred.json'
URL_DB = 'https://arquiii-default-rtdb.firebaseio.com/'
cred = credentials.Certificate(PATH_CRED)
firebase_admin.initialize_app(cred, {
'databaseURL': URL_DB
})
REF = db.reference("/")
REF.set({
'Proceso':
{
}
})
REF = db.reference("/Vehiculos")
while True:
tiempo = datetime.now()
#Si hay un 1 en el pin 20
if GPIO.input(20):
tiempoE = 5 #tiempo que va a cambiar por estacion
if GPIO.input(20):
tamano = "Pequeno"
elif GPIO.input(20) and GPIO.input(16):
tamano = "Grande"
else:
tamano = "Mediano"
print("Se ha detectado un automovil de tamano",tamano)
REF.push({
"Recepcion": str(tiempo),
"Tamano": tamano,
})
if (tiempo == 5):
print("Activacion de agua... ")
tiempo += 5
GPIO.output(26, True)
print("Desactivacion de agua...")
tiempo = datetime.now()
REF.push({
"Tiempo agua": str(tiempo),
})
GPIO.output(26, False)
elif (tiempo == 10):
print("Activacion de rocio de shampoo... ")
tiempo += 5
GPIO.output(19, True)
print("Desactivacion de rocio de shampoo...")
tiempo = datetime.now()
REF.push({
"Tiempo rocio": str(tiempo),
})
GPIO.output(19, False)
elif (tiempo == 15):
print("Activacion de rodillos de limpieza... ")
tiempo += 5
GPIO.output(13, True)
print("Desactivacion de rodillos de limpieza...")
tiempo = datetime.now()
REF.push({
"Tiempo rodillo": str(tiempo),
})
GPIO.output(13, False)
elif (tiempo == 20):
print("Activacion de escobas de limpieza ")
tiempo += 5
GPIO.output(6, True)
print("Desactivacion de escobas de limpieza...")
tiempo = datetime.now()
REF.push({
"Tiempo escoba": str(tiempo),
})
GPIO.output(6, False)
elif (tiempo == 25):
print("Activacion de rocio de agua 2nda vez ")
tiempo += 5
GPIO.output(5, True)
print("Desactivacion de rocio de agua 2nda vez...")
tiempo = datetime.now()
REF.push({
"Tiempo agua 2nda": str(tiempo),
})
GPIO.output(5, False)
elif (tiempo == 30):
print("Activacion de rodillos de secado")
tiempo += 5
GPIO.output(11, True)
print("Desactivacion de rodillos de secado...")
tiempo = datetime.now()
REF.push({
"Tiempo rodillos": str(tiempo),
})
GPIO.output(11, False)
GPIO.cleanup()
|
[
"from datetime import datetime\nimport time\nfrom os import system\nimport RPi.GPIO as GPIO\nimport firebase_admin\nfrom firebase_admin import credentials\nfrom firebase_admin import db\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM) \nGPIO.setup(21, GPIO.OUT) # este pin es de salida carro\nGPIO.setup(26, GPIO.OUT) # este pin es de salida carro\nGPIO.setup(19, GPIO.OUT) # este pin es de salida carro\nGPIO.setup(13, GPIO.OUT) # este pin es de salida carro\nGPIO.setup(6, GPIO.OUT) # este pin es de salida carro\nGPIO.setup(5, GPIO.OUT) # este pin es de salida carro\nGPIO.setup(11, GPIO.OUT) # este pin es de salida carro\nGPIO.setup(20, GPIO.IN) #Este pin es una entrada carro pequeno\nGPIO.setup(16, GPIO.IN) #Este pin es una entrada carro grande\n\n\nPATH_CRED = '/home/pi/Desktop/cred.json'\nURL_DB = 'https://arquiii-default-rtdb.firebaseio.com/'\ncred = credentials.Certificate(PATH_CRED)\nfirebase_admin.initialize_app(cred, {\n 'databaseURL': URL_DB\n})\nREF = db.reference(\"/\")\n\nREF.set({\n 'Proceso': \n {\n }\n})\n\nREF = db.reference(\"/Vehiculos\")\n\nwhile True:\n tiempo = datetime.now()\n #Si hay un 1 en el pin 20\n if GPIO.input(20):\n tiempoE = 5 #tiempo que va a cambiar por estacion\n if GPIO.input(20):\n tamano = \"Pequeno\"\n elif GPIO.input(20) and GPIO.input(16):\n tamano = \"Grande\"\n else:\n tamano = \"Mediano\"\n \n print(\"Se ha detectado un automovil de tamano\",tamano)\n REF.push({ \n \"Recepcion\": str(tiempo),\n \"Tamano\": tamano, \n })\n if (tiempo == 5):\n print(\"Activacion de agua... \")\n tiempo += 5\n GPIO.output(26, True)\n print(\"Desactivacion de agua...\")\n tiempo = datetime.now()\n REF.push({ \n \"Tiempo agua\": str(tiempo), \n })\n GPIO.output(26, False)\n elif (tiempo == 10):\n print(\"Activacion de rocio de shampoo... 
\")\n tiempo += 5\n GPIO.output(19, True)\n print(\"Desactivacion de rocio de shampoo...\")\n tiempo = datetime.now()\n REF.push({ \n \"Tiempo rocio\": str(tiempo), \n })\n GPIO.output(19, False)\n elif (tiempo == 15):\n print(\"Activacion de rodillos de limpieza... \")\n tiempo += 5\n GPIO.output(13, True)\n print(\"Desactivacion de rodillos de limpieza...\")\n tiempo = datetime.now()\n REF.push({ \n \"Tiempo rodillo\": str(tiempo), \n })\n GPIO.output(13, False)\n elif (tiempo == 20):\n print(\"Activacion de escobas de limpieza \")\n tiempo += 5\n GPIO.output(6, True)\n print(\"Desactivacion de escobas de limpieza...\")\n tiempo = datetime.now()\n REF.push({ \n \"Tiempo escoba\": str(tiempo), \n })\n GPIO.output(6, False)\n elif (tiempo == 25):\n print(\"Activacion de rocio de agua 2nda vez \")\n tiempo += 5\n GPIO.output(5, True)\n print(\"Desactivacion de rocio de agua 2nda vez...\")\n tiempo = datetime.now()\n REF.push({ \n \"Tiempo agua 2nda\": str(tiempo), \n })\n GPIO.output(5, False)\n elif (tiempo == 30):\n print(\"Activacion de rodillos de secado\")\n tiempo += 5\n GPIO.output(11, True)\n print(\"Desactivacion de rodillos de secado...\")\n tiempo = datetime.now()\n REF.push({ \n \"Tiempo rodillos\": str(tiempo), \n })\n GPIO.output(11, False)\n \nGPIO.cleanup()\n\n\n\n\n",
"from datetime import datetime\nimport time\nfrom os import system\nimport RPi.GPIO as GPIO\nimport firebase_admin\nfrom firebase_admin import credentials\nfrom firebase_admin import db\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM)\nGPIO.setup(21, GPIO.OUT)\nGPIO.setup(26, GPIO.OUT)\nGPIO.setup(19, GPIO.OUT)\nGPIO.setup(13, GPIO.OUT)\nGPIO.setup(6, GPIO.OUT)\nGPIO.setup(5, GPIO.OUT)\nGPIO.setup(11, GPIO.OUT)\nGPIO.setup(20, GPIO.IN)\nGPIO.setup(16, GPIO.IN)\nPATH_CRED = '/home/pi/Desktop/cred.json'\nURL_DB = 'https://arquiii-default-rtdb.firebaseio.com/'\ncred = credentials.Certificate(PATH_CRED)\nfirebase_admin.initialize_app(cred, {'databaseURL': URL_DB})\nREF = db.reference('/')\nREF.set({'Proceso': {}})\nREF = db.reference('/Vehiculos')\nwhile True:\n tiempo = datetime.now()\n if GPIO.input(20):\n tiempoE = 5\n if GPIO.input(20):\n tamano = 'Pequeno'\n elif GPIO.input(20) and GPIO.input(16):\n tamano = 'Grande'\n else:\n tamano = 'Mediano'\n print('Se ha detectado un automovil de tamano', tamano)\n REF.push({'Recepcion': str(tiempo), 'Tamano': tamano})\n if tiempo == 5:\n print('Activacion de agua... ')\n tiempo += 5\n GPIO.output(26, True)\n print('Desactivacion de agua...')\n tiempo = datetime.now()\n REF.push({'Tiempo agua': str(tiempo)})\n GPIO.output(26, False)\n elif tiempo == 10:\n print('Activacion de rocio de shampoo... ')\n tiempo += 5\n GPIO.output(19, True)\n print('Desactivacion de rocio de shampoo...')\n tiempo = datetime.now()\n REF.push({'Tiempo rocio': str(tiempo)})\n GPIO.output(19, False)\n elif tiempo == 15:\n print('Activacion de rodillos de limpieza... 
')\n tiempo += 5\n GPIO.output(13, True)\n print('Desactivacion de rodillos de limpieza...')\n tiempo = datetime.now()\n REF.push({'Tiempo rodillo': str(tiempo)})\n GPIO.output(13, False)\n elif tiempo == 20:\n print('Activacion de escobas de limpieza ')\n tiempo += 5\n GPIO.output(6, True)\n print('Desactivacion de escobas de limpieza...')\n tiempo = datetime.now()\n REF.push({'Tiempo escoba': str(tiempo)})\n GPIO.output(6, False)\n elif tiempo == 25:\n print('Activacion de rocio de agua 2nda vez ')\n tiempo += 5\n GPIO.output(5, True)\n print('Desactivacion de rocio de agua 2nda vez...')\n tiempo = datetime.now()\n REF.push({'Tiempo agua 2nda': str(tiempo)})\n GPIO.output(5, False)\n elif tiempo == 30:\n print('Activacion de rodillos de secado')\n tiempo += 5\n GPIO.output(11, True)\n print('Desactivacion de rodillos de secado...')\n tiempo = datetime.now()\n REF.push({'Tiempo rodillos': str(tiempo)})\n GPIO.output(11, False)\nGPIO.cleanup()\n",
"<import token>\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM)\nGPIO.setup(21, GPIO.OUT)\nGPIO.setup(26, GPIO.OUT)\nGPIO.setup(19, GPIO.OUT)\nGPIO.setup(13, GPIO.OUT)\nGPIO.setup(6, GPIO.OUT)\nGPIO.setup(5, GPIO.OUT)\nGPIO.setup(11, GPIO.OUT)\nGPIO.setup(20, GPIO.IN)\nGPIO.setup(16, GPIO.IN)\nPATH_CRED = '/home/pi/Desktop/cred.json'\nURL_DB = 'https://arquiii-default-rtdb.firebaseio.com/'\ncred = credentials.Certificate(PATH_CRED)\nfirebase_admin.initialize_app(cred, {'databaseURL': URL_DB})\nREF = db.reference('/')\nREF.set({'Proceso': {}})\nREF = db.reference('/Vehiculos')\nwhile True:\n tiempo = datetime.now()\n if GPIO.input(20):\n tiempoE = 5\n if GPIO.input(20):\n tamano = 'Pequeno'\n elif GPIO.input(20) and GPIO.input(16):\n tamano = 'Grande'\n else:\n tamano = 'Mediano'\n print('Se ha detectado un automovil de tamano', tamano)\n REF.push({'Recepcion': str(tiempo), 'Tamano': tamano})\n if tiempo == 5:\n print('Activacion de agua... ')\n tiempo += 5\n GPIO.output(26, True)\n print('Desactivacion de agua...')\n tiempo = datetime.now()\n REF.push({'Tiempo agua': str(tiempo)})\n GPIO.output(26, False)\n elif tiempo == 10:\n print('Activacion de rocio de shampoo... ')\n tiempo += 5\n GPIO.output(19, True)\n print('Desactivacion de rocio de shampoo...')\n tiempo = datetime.now()\n REF.push({'Tiempo rocio': str(tiempo)})\n GPIO.output(19, False)\n elif tiempo == 15:\n print('Activacion de rodillos de limpieza... 
')\n tiempo += 5\n GPIO.output(13, True)\n print('Desactivacion de rodillos de limpieza...')\n tiempo = datetime.now()\n REF.push({'Tiempo rodillo': str(tiempo)})\n GPIO.output(13, False)\n elif tiempo == 20:\n print('Activacion de escobas de limpieza ')\n tiempo += 5\n GPIO.output(6, True)\n print('Desactivacion de escobas de limpieza...')\n tiempo = datetime.now()\n REF.push({'Tiempo escoba': str(tiempo)})\n GPIO.output(6, False)\n elif tiempo == 25:\n print('Activacion de rocio de agua 2nda vez ')\n tiempo += 5\n GPIO.output(5, True)\n print('Desactivacion de rocio de agua 2nda vez...')\n tiempo = datetime.now()\n REF.push({'Tiempo agua 2nda': str(tiempo)})\n GPIO.output(5, False)\n elif tiempo == 30:\n print('Activacion de rodillos de secado')\n tiempo += 5\n GPIO.output(11, True)\n print('Desactivacion de rodillos de secado...')\n tiempo = datetime.now()\n REF.push({'Tiempo rodillos': str(tiempo)})\n GPIO.output(11, False)\nGPIO.cleanup()\n",
"<import token>\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM)\nGPIO.setup(21, GPIO.OUT)\nGPIO.setup(26, GPIO.OUT)\nGPIO.setup(19, GPIO.OUT)\nGPIO.setup(13, GPIO.OUT)\nGPIO.setup(6, GPIO.OUT)\nGPIO.setup(5, GPIO.OUT)\nGPIO.setup(11, GPIO.OUT)\nGPIO.setup(20, GPIO.IN)\nGPIO.setup(16, GPIO.IN)\n<assignment token>\nfirebase_admin.initialize_app(cred, {'databaseURL': URL_DB})\n<assignment token>\nREF.set({'Proceso': {}})\n<assignment token>\nwhile True:\n tiempo = datetime.now()\n if GPIO.input(20):\n tiempoE = 5\n if GPIO.input(20):\n tamano = 'Pequeno'\n elif GPIO.input(20) and GPIO.input(16):\n tamano = 'Grande'\n else:\n tamano = 'Mediano'\n print('Se ha detectado un automovil de tamano', tamano)\n REF.push({'Recepcion': str(tiempo), 'Tamano': tamano})\n if tiempo == 5:\n print('Activacion de agua... ')\n tiempo += 5\n GPIO.output(26, True)\n print('Desactivacion de agua...')\n tiempo = datetime.now()\n REF.push({'Tiempo agua': str(tiempo)})\n GPIO.output(26, False)\n elif tiempo == 10:\n print('Activacion de rocio de shampoo... ')\n tiempo += 5\n GPIO.output(19, True)\n print('Desactivacion de rocio de shampoo...')\n tiempo = datetime.now()\n REF.push({'Tiempo rocio': str(tiempo)})\n GPIO.output(19, False)\n elif tiempo == 15:\n print('Activacion de rodillos de limpieza... 
')\n tiempo += 5\n GPIO.output(13, True)\n print('Desactivacion de rodillos de limpieza...')\n tiempo = datetime.now()\n REF.push({'Tiempo rodillo': str(tiempo)})\n GPIO.output(13, False)\n elif tiempo == 20:\n print('Activacion de escobas de limpieza ')\n tiempo += 5\n GPIO.output(6, True)\n print('Desactivacion de escobas de limpieza...')\n tiempo = datetime.now()\n REF.push({'Tiempo escoba': str(tiempo)})\n GPIO.output(6, False)\n elif tiempo == 25:\n print('Activacion de rocio de agua 2nda vez ')\n tiempo += 5\n GPIO.output(5, True)\n print('Desactivacion de rocio de agua 2nda vez...')\n tiempo = datetime.now()\n REF.push({'Tiempo agua 2nda': str(tiempo)})\n GPIO.output(5, False)\n elif tiempo == 30:\n print('Activacion de rodillos de secado')\n tiempo += 5\n GPIO.output(11, True)\n print('Desactivacion de rodillos de secado...')\n tiempo = datetime.now()\n REF.push({'Tiempo rodillos': str(tiempo)})\n GPIO.output(11, False)\nGPIO.cleanup()\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,748 |
f8c222b1a84a092a3388cb801a88495bc227b1d5
|
import datetime
import hashlib
import json
from flask import Flask, jsonify, request
import requests
from uuid import uuid4
from urllib.parse import urlparse
from Crypto.PublicKey import RSA
# Part 1 - Building a Blockchain
class Blockchain:
#chain(emptylist) , farmer_details(emptylist), nodes(set), create_block(function to create the genesis block)
def __init__(self):
self.chain = []
self.farmer_details = []
self.create_block(proof = 1, previous_hash = '0')
self.nodes = set()
#It creates a dictionary block which contains index(length of chain+1),timestamp( by using the module datetime),
#Proof( passes as parameter),previous_hash(passed as parameter),
#Farmer_details(from self) and append this to the chain.
def create_block(self, proof, previous_hash):
block = {'index': len(self.chain) + 1,
'timestamp': str(datetime.datetime.now()),
'proof': proof,
'previous_hash': previous_hash,
'farmer_details': self.farmer_details}
self.farmer_details = []
self.chain.append(block)
return block
#It returns the last block of the chain.
def get_previous_block(self):
return self.chain[-1]
#It runs a lop and check if hash of new proof^2- previous proof^2 contains 4 leading zeroes.
#if yes,then it returns the new proof otherwise increment the new proof by 1 and iterates again.
def proof_of_work(self, previous_proof):
new_proof = 1
check_proof = False
while check_proof is False:
hash_operation = hashlib.sha256(str(new_proof**2 - previous_proof**2).encode()).hexdigest()
if hash_operation[:4] == '0000':
check_proof = True
else:
new_proof += 1
return new_proof
#- It returns the hash of the block using sha256
def hash(self, block):
encoded_block = json.dumps(block, sort_keys = True).encode()
return hashlib.sha256(encoded_block).hexdigest()
#It iterates a loop from 0 to chain length and check if hash of the block is same as returned by the hash function,
#then it checks if hash of the proof of current block^2-proof of previous block^2 contains 4 leading zeroes or not.
# if no, then chain is not valid.
def is_chain_valid(self, chain):
previous_block = chain[0]
block_index = 1
while block_index < len(chain):
block = chain[block_index]
if block['previous_hash'] != self.hash(previous_block):
return False
previous_proof = previous_block['proof']
proof = block['proof']
hash_operation = hashlib.sha256(str(proof**2 - previous_proof**2).encode()).hexdigest()
if hash_operation[:4] != '0000':
return False
previous_block = block
block_index += 1
return True
#- It creates the private key using the RSA.generate(1024),then creates the public key,
# hash of transaction(it is the hash of the sum of hashes of the name,crop_name,quantity,rate),
#data( it is the hash of the transaction in the int form),
#signature( it is created by raising the data to the power of privatekey.d%privatekey.n).
# Then it append a dictionary containing all these information in the hash format to the chain farmer_details
#and returns the index of the new block.
def add_farmerdetails(self, name, crop_name, quantity,rate):
privatekey = RSA.generate(1024)
publickey = privatekey.publickey()
hash_of_transaction=hashlib.sha256((hashlib.sha256(name.encode()).hexdigest()+hashlib.sha256(crop_name.encode()).hexdigest()+hashlib.sha256(str(quantity).encode()).hexdigest()+hashlib.sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()
data=int(hash_of_transaction,16)
signature=pow(data,privatekey.d,privatekey.n)
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.encode()).hexdigest(),
'crop_name': hashlib.sha256(crop_name.encode()).hexdigest(),
'quantity_inkg': hashlib.sha256(str(quantity).encode()).hexdigest(),
'rate_perkg': hashlib.sha256(str(rate).encode()).hexdigest(),
'hash_of_transaction': hash_of_transaction,
'signature': signature
})
previous_block = self.get_previous_block()
return previous_block['index'] + 1
#It takes the url using urlparse of the address and then adds this to the set nodes in the self.
def add_node(self, address):
parsed_url = urlparse(address)
self.nodes.add(parsed_url.netloc)
#It access all the nodes in the set nodes and then iterates a loop to get their chain length using get_chain (to be described)
# and replaces the current chain with the longest chain of all the nodes.
def replace_chain(self):
network = self.nodes
longest_chain = None
max_length = len(self.chain)
for node in network:
response = requests.get(f'http://{node}/get_chain')
if response.status_code == 200:
length = response.json()['length']
chain = response.json()['chain']
if length > max_length and self.is_chain_valid(chain):
max_length = length
longest_chain = chain
if longest_chain:
self.chain = longest_chain
return True
return False
# Part 2 - Mining our Blockchain
# Creating a Web App
app = Flask(__name__)
# Creating an address for the node on Port 5001
node_address = str(uuid4()).replace('-', '')
# Creating a Blockchain
blockchain = Blockchain()
# Mining a new block
#- It access the previous block by calling the function get_previous_block(),
#then access the previous proof by previous_block[‘proof’],
#then it creates a new proof by using the function proof_of_work(‘previous_proof’),
#then it finds the hash of the previous block by using the function blockchain.hash(previous_block),
# then calls the function create_block( proof,previous_hash),then finds the hash of this block.
# It creates a response containing all the details of the new block,jsonify it and returns it.
@app.route('/mine_block', methods = ['GET'])
def mine_block():
previous_block = blockchain.get_previous_block()
previous_proof = previous_block['proof']
proof = blockchain.proof_of_work(previous_proof)
previous_hash = blockchain.hash(previous_block)
#blockchain.add_transaction(sender = node_address, receiver = 'Hadelin', amount = 1)
block = blockchain.create_block(proof, previous_hash)
current_block=blockchain.get_previous_block()
current_hash=blockchain.hash(current_block)
response = {'message': 'Congratulations, you just mined a block!',
'index': block['index'],
'timestamp': block['timestamp'],
'proof': block['proof'],
'previous_hash': block['previous_hash'],
'farmer': block['farmer_details'],
'current_hash': current_hash}
return jsonify(response), 200
# Getting the full Blockchain
#- It creates an empty list chain_till_now, then iterates over all the blocks in the blockchain and find it’s hash
#then check if the list farmer_details is empty or not,
#if it is empty then it appends a dictionary containing the current block’s index,timestamp,proof,previous_hash, current_hash, farmer_details.
# If the farmer_details list is not empty then it first finds the length of the list farmer_details
#then it iterates over the length of the list farmer_details and appends the hash of transaction
# contained within the dictionary of the list farmer_details. Then it creates the hash of this appended hash. This is the merged hash.
# Then it creates a dictionary containing merged hash,index,timestamp,proof,previous_hash,farmer_details and current hash.
# Then, it appends this dictionary to the list chain till now.
# It then creates the response containing the chain till now and length of the blockchain,jasonifies it and returns it.
@app.route('/print_chain',methods=['GET'])
def print_chain():
chain_till_now =[]
for xblock in blockchain.chain:
xcurrent_hash=blockchain.hash(xblock)
if len(xblock['farmer_details'])==0:
chain_till_now.append({'index': xblock['index'],
'timestamp': xblock['timestamp'],
'proof': xblock['proof'],
'previous_hash': xblock['previous_hash'],
'farmer': xblock['farmer_details'],
'current_hash': xcurrent_hash})
else:
l=len(xblock['farmer_details'])
sum=""
l-=1
while(l>=0):
sum=xblock['farmer_details'][l]['hash_of_transaction']+sum
l-=1
chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode()).hexdigest(),
'index': xblock['index'],
'timestamp': xblock['timestamp'],
'proof': xblock['proof'],
'previous_hash': xblock['previous_hash'],
'farmer': xblock['farmer_details'],
'current_hash': xcurrent_hash})
response = {'chain': chain_till_now,
'length': len(blockchain.chain)}
return jsonify(response), 200
#- It creats the response containing the blockchain.chain and its length,jasonifies it and returns it.
@app.route('/get_chain', methods = ['GET'])
def get_chain():
response = {'chain': blockchain.chain,
'length': len(blockchain.chain)}
return jsonify(response), 200
# Checking if the Blockchain is valid
#- It calls the function is_chain_valid and returns a string as response based on whether the chain is valid or not.
@app.route('/is_valid', methods = ['GET'])
def is_valid():
is_valid = blockchain.is_chain_valid(blockchain.chain)
if is_valid:
response = {'message': 'All good. The Blockchain is valid.'}
else:
response = {'message': 'Houston, we have a problem. The Blockchain is not valid.'}
return jsonify(response), 200
# Adding a new transaction to the Blockchain
#It takes the input in Jason format and checks if all the keys in the farmer keys(name_of_farmer,crop_name,quantity_inkg, rate_perkg) are available in the json file.
#If no, It returns that some elements are missing
# otherwise it calls the function add_farmer_details by passing the farmer details in the json file as parameter and
#returns the index of the block in which these details will be added.
@app.route('/add_farmerdetails', methods = ['POST'])
def add_farmer_details():
json = request.get_json()
farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg','rate_perkg']
if not all(key in json for key in farmer_keys):
return 'Some elements of the farmer_details are missing', 400
index = blockchain.add_farmerdetails(json['name_of_farmer'], json['crop_name'], json['quantity_inkg'], json['rate_perkg'])
response = {'message': f'These details will be added to Block {index}'}
return jsonify(response), 201
# Part 3 - Decentralizing our Blockchain
# Connecting new nodes
#It takes a Jason file as request and first check if it contains any node or not.
# If it contains the nodes then it calls the function blockchain.add_node .
#Then it returns the list of blockchain.nodes as response.
@app.route('/connect_node', methods = ['POST'])
def connect_node():
json = request.get_json()
nodes = json.get('nodes')
if nodes is None:
return "No node", 400
for node in nodes:
blockchain.add_node(node)
response = {'message': 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:',
'total_nodes': list(blockchain.nodes)}
return jsonify(response), 201
# Replacing the chain by the longest chain if needed
#- It calls the function blockcain.replace_chain. If the chain is replaced
#it returns the response with a message that the nodes has the different chains so the chain has been replaced by the longest chain alongwith the blockchain.chain.
# Otherwise it returns the response with a message all good the chain is the longest one with the blockchain.chain .
#then it jsonify the response and returns it.
@app.route('/replace_chain', methods=['GET'])
def replace_chain():
    """Run the consensus step: adopt the longest valid chain in the network.

    Returns 200 with either the new chain (if a longer valid chain was
    found on a peer) or the current chain (if it was already the longest).
    """
    chain_was_replaced = blockchain.replace_chain()
    if chain_was_replaced:
        response = {
            'message': 'The nodes had different chains so the chain was replaced by the longest one.',
            'new_chain': blockchain.chain,
        }
    else:
        response = {
            'message': 'All good. The chain is the largest one.',
            'actual_chain': blockchain.chain,
        }
    return jsonify(response), 200
# Running the app
# Guarded so that importing this module (e.g. for tests) does not start the
# development server; behavior when run as a script is unchanged.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5001)
|
[
"\r\nimport datetime\r\nimport hashlib\r\nimport json\r\nfrom flask import Flask, jsonify, request\r\nimport requests\r\nfrom uuid import uuid4\r\nfrom urllib.parse import urlparse\r\nfrom Crypto.PublicKey import RSA\r\n\r\n# Part 1 - Building a Blockchain\r\n\r\nclass Blockchain:\r\n#chain(emptylist) , farmer_details(emptylist), nodes(set), create_block(function to create the genesis block)\r\n def __init__(self):\r\n self.chain = []\r\n self.farmer_details = []\r\n self.create_block(proof = 1, previous_hash = '0')\r\n self.nodes = set()\r\n#It creates a dictionary block which contains index(length of chain+1),timestamp( by using the module datetime),\r\n#Proof( passes as parameter),previous_hash(passed as parameter),\r\n#Farmer_details(from self) and append this to the chain.\r\n \r\n def create_block(self, proof, previous_hash):\r\n block = {'index': len(self.chain) + 1,\r\n 'timestamp': str(datetime.datetime.now()),\r\n 'proof': proof,\r\n 'previous_hash': previous_hash,\r\n 'farmer_details': self.farmer_details}\r\n self.farmer_details = []\r\n self.chain.append(block)\r\n return block\r\n#It returns the last block of the chain.\r\n def get_previous_block(self):\r\n return self.chain[-1]\r\n#It runs a lop and check if hash of new proof^2- previous proof^2 contains 4 leading zeroes. 
\r\n#if yes,then it returns the new proof otherwise increment the new proof by 1 and iterates again.\r\n def proof_of_work(self, previous_proof):\r\n new_proof = 1\r\n check_proof = False\r\n while check_proof is False:\r\n hash_operation = hashlib.sha256(str(new_proof**2 - previous_proof**2).encode()).hexdigest()\r\n if hash_operation[:4] == '0000':\r\n check_proof = True\r\n else:\r\n new_proof += 1\r\n return new_proof\r\n#- It returns the hash of the block using sha256 \r\n def hash(self, block):\r\n encoded_block = json.dumps(block, sort_keys = True).encode()\r\n return hashlib.sha256(encoded_block).hexdigest()\r\n#It iterates a loop from 0 to chain length and check if hash of the block is same as returned by the hash function, \r\n#then it checks if hash of the proof of current block^2-proof of previous block^2 contains 4 leading zeroes or not.\r\n# if no, then chain is not valid. \r\n def is_chain_valid(self, chain):\r\n previous_block = chain[0]\r\n block_index = 1\r\n while block_index < len(chain):\r\n block = chain[block_index]\r\n if block['previous_hash'] != self.hash(previous_block):\r\n return False\r\n previous_proof = previous_block['proof']\r\n proof = block['proof']\r\n hash_operation = hashlib.sha256(str(proof**2 - previous_proof**2).encode()).hexdigest()\r\n if hash_operation[:4] != '0000':\r\n return False\r\n previous_block = block\r\n block_index += 1\r\n return True\r\n#- It creates the private key using the RSA.generate(1024),then creates the public key,\r\n# hash of transaction(it is the hash of the sum of hashes of the name,crop_name,quantity,rate),\r\n#data( it is the hash of the transaction in the int form),\r\n#signature( it is created by raising the data to the power of privatekey.d%privatekey.n).\r\n# Then it append a dictionary containing all these information in the hash format to the chain farmer_details \r\n#and returns the index of the new block. 
\r\n def add_farmerdetails(self, name, crop_name, quantity,rate):\r\n privatekey = RSA.generate(1024) \r\n publickey = privatekey.publickey() \r\n hash_of_transaction=hashlib.sha256((hashlib.sha256(name.encode()).hexdigest()+hashlib.sha256(crop_name.encode()).hexdigest()+hashlib.sha256(str(quantity).encode()).hexdigest()+hashlib.sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\r\n data=int(hash_of_transaction,16)\r\n signature=pow(data,privatekey.d,privatekey.n)\r\n self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.encode()).hexdigest(),\r\n 'crop_name': hashlib.sha256(crop_name.encode()).hexdigest(),\r\n 'quantity_inkg': hashlib.sha256(str(quantity).encode()).hexdigest(),\r\n 'rate_perkg': hashlib.sha256(str(rate).encode()).hexdigest(),\r\n 'hash_of_transaction': hash_of_transaction,\r\n 'signature': signature\r\n })\r\n previous_block = self.get_previous_block()\r\n return previous_block['index'] + 1\r\n#It takes the url using urlparse of the address and then adds this to the set nodes in the self.\r\n def add_node(self, address):\r\n parsed_url = urlparse(address)\r\n self.nodes.add(parsed_url.netloc)\r\n#It access all the nodes in the set nodes and then iterates a loop to get their chain length using get_chain (to be described)\r\n# and replaces the current chain with the longest chain of all the nodes. 
\r\n def replace_chain(self):\r\n network = self.nodes\r\n longest_chain = None\r\n max_length = len(self.chain)\r\n for node in network:\r\n response = requests.get(f'http://{node}/get_chain')\r\n if response.status_code == 200:\r\n length = response.json()['length']\r\n chain = response.json()['chain']\r\n if length > max_length and self.is_chain_valid(chain):\r\n max_length = length\r\n longest_chain = chain\r\n if longest_chain:\r\n self.chain = longest_chain\r\n return True\r\n return False\r\n\r\n# Part 2 - Mining our Blockchain\r\n\r\n# Creating a Web App\r\napp = Flask(__name__)\r\n\r\n# Creating an address for the node on Port 5001\r\nnode_address = str(uuid4()).replace('-', '')\r\n\r\n# Creating a Blockchain\r\nblockchain = Blockchain()\r\n\r\n# Mining a new block\r\n#- It access the previous block by calling the function get_previous_block(), \r\n#then access the previous proof by previous_block[‘proof’],\r\n#then it creates a new proof by using the function proof_of_work(‘previous_proof’), \r\n#then it finds the hash of the previous block by using the function blockchain.hash(previous_block),\r\n# then calls the function create_block( proof,previous_hash),then finds the hash of this block.\r\n# It creates a response containing all the details of the new block,jsonify it and returns it.\r\[email protected]('/mine_block', methods = ['GET'])\r\ndef mine_block():\r\n previous_block = blockchain.get_previous_block()\r\n previous_proof = previous_block['proof']\r\n proof = blockchain.proof_of_work(previous_proof)\r\n previous_hash = blockchain.hash(previous_block)\r\n #blockchain.add_transaction(sender = node_address, receiver = 'Hadelin', amount = 1)\r\n block = blockchain.create_block(proof, previous_hash)\r\n current_block=blockchain.get_previous_block()\r\n current_hash=blockchain.hash(current_block)\r\n response = {'message': 'Congratulations, you just mined a block!',\r\n 'index': block['index'],\r\n 'timestamp': block['timestamp'],\r\n 'proof': 
block['proof'],\r\n 'previous_hash': block['previous_hash'],\r\n 'farmer': block['farmer_details'],\r\n 'current_hash': current_hash}\r\n return jsonify(response), 200\r\n\r\n# Getting the full Blockchain\r\n#- It creates an empty list chain_till_now, then iterates over all the blocks in the blockchain and find it’s hash \r\n#then check if the list farmer_details is empty or not, \r\n#if it is empty then it appends a dictionary containing the current block’s index,timestamp,proof,previous_hash, current_hash, farmer_details.\r\n# If the farmer_details list is not empty then it first finds the length of the list farmer_details \r\n#then it iterates over the length of the list farmer_details and appends the hash of transaction \r\n# contained within the dictionary of the list farmer_details. Then it creates the hash of this appended hash. This is the merged hash.\r\n# Then it creates a dictionary containing merged hash,index,timestamp,proof,previous_hash,farmer_details and current hash.\r\n# Then, it appends this dictionary to the list chain till now.\r\n# It then creates the response containing the chain till now and length of the blockchain,jasonifies it and returns it. 
\r\n\r\[email protected]('/print_chain',methods=['GET'])\r\ndef print_chain():\r\n chain_till_now =[]\r\n for xblock in blockchain.chain:\r\n xcurrent_hash=blockchain.hash(xblock) \r\n if len(xblock['farmer_details'])==0:\r\n chain_till_now.append({'index': xblock['index'],\r\n 'timestamp': xblock['timestamp'],\r\n 'proof': xblock['proof'],\r\n 'previous_hash': xblock['previous_hash'],\r\n 'farmer': xblock['farmer_details'],\r\n 'current_hash': xcurrent_hash})\r\n else:\r\n l=len(xblock['farmer_details'])\r\n sum=\"\"\r\n l-=1\r\n while(l>=0):\r\n sum=xblock['farmer_details'][l]['hash_of_transaction']+sum\r\n l-=1\r\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode()).hexdigest(),\r\n 'index': xblock['index'],\r\n 'timestamp': xblock['timestamp'],\r\n 'proof': xblock['proof'],\r\n 'previous_hash': xblock['previous_hash'],\r\n 'farmer': xblock['farmer_details'],\r\n 'current_hash': xcurrent_hash}) \r\n response = {'chain': chain_till_now,\r\n 'length': len(blockchain.chain)}\r\n return jsonify(response), 200\r\n\r\n#- It creats the response containing the blockchain.chain and its length,jasonifies it and returns it. \r\[email protected]('/get_chain', methods = ['GET'])\r\ndef get_chain():\r\n response = {'chain': blockchain.chain,\r\n 'length': len(blockchain.chain)}\r\n return jsonify(response), 200\r\n\r\n# Checking if the Blockchain is valid\r\n#- It calls the function is_chain_valid and returns a string as response based on whether the chain is valid or not.\r\[email protected]('/is_valid', methods = ['GET'])\r\ndef is_valid():\r\n is_valid = blockchain.is_chain_valid(blockchain.chain)\r\n if is_valid:\r\n response = {'message': 'All good. The Blockchain is valid.'}\r\n else:\r\n response = {'message': 'Houston, we have a problem. 
The Blockchain is not valid.'}\r\n return jsonify(response), 200\r\n\r\n# Adding a new transaction to the Blockchain\r\n#It takes the input in Jason format and checks if all the keys in the farmer keys(name_of_farmer,crop_name,quantity_inkg, rate_perkg) are available in the json file. \r\n#If no, It returns that some elements are missing\r\n# otherwise it calls the function add_farmer_details by passing the farmer details in the json file as parameter and \r\n#returns the index of the block in which these details will be added.\r\[email protected]('/add_farmerdetails', methods = ['POST'])\r\ndef add_farmer_details():\r\n json = request.get_json()\r\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg','rate_perkg']\r\n if not all(key in json for key in farmer_keys):\r\n return 'Some elements of the farmer_details are missing', 400\r\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json['crop_name'], json['quantity_inkg'], json['rate_perkg'])\r\n response = {'message': f'These details will be added to Block {index}'}\r\n return jsonify(response), 201\r\n\r\n# Part 3 - Decentralizing our Blockchain\r\n\r\n# Connecting new nodes\r\n#It takes a Jason file as request and first check if it contains any node or not.\r\n# If it contains the nodes then it calls the function blockchain.add_node .\r\n#Then it returns the list of blockchain.nodes as response.\r\[email protected]('/connect_node', methods = ['POST'])\r\ndef connect_node():\r\n json = request.get_json()\r\n nodes = json.get('nodes')\r\n if nodes is None:\r\n return \"No node\", 400\r\n for node in nodes:\r\n blockchain.add_node(node)\r\n response = {'message': 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:',\r\n 'total_nodes': list(blockchain.nodes)}\r\n return jsonify(response), 201\r\n\r\n# Replacing the chain by the longest chain if needed\r\n#- It calls the function blockcain.replace_chain. 
If the chain is replaced \r\n#it returns the response with a message that the nodes has the different chains so the chain has been replaced by the longest chain alongwith the blockchain.chain.\r\n# Otherwise it returns the response with a message all good the chain is the longest one with the blockchain.chain .\r\n#then it jsonify the response and returns it.\r\[email protected]('/replace_chain', methods = ['GET'])\r\ndef replace_chain():\r\n is_chain_replaced = blockchain.replace_chain()\r\n if is_chain_replaced:\r\n response = {'message': 'The nodes had different chains so the chain was replaced by the longest one.',\r\n 'new_chain': blockchain.chain}\r\n else:\r\n response = {'message': 'All good. The chain is the largest one.',\r\n 'actual_chain': blockchain.chain}\r\n return jsonify(response), 200\r\n\r\n# Running the app\r\napp.run(host = '0.0.0.0', port = 5001)\r\n",
"import datetime\nimport hashlib\nimport json\nfrom flask import Flask, jsonify, request\nimport requests\nfrom uuid import uuid4\nfrom urllib.parse import urlparse\nfrom Crypto.PublicKey import RSA\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n 
sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\napp = Flask(__name__)\nnode_address = str(uuid4()).replace('-', '')\nblockchain = Blockchain()\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 
200\n\n\[email protected]('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. 
The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\napp.run(host='0.0.0.0', port=5001)\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\napp = Flask(__name__)\nnode_address = str(uuid4()).replace('-', '')\nblockchain = Blockchain()\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\[email protected]('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = 
blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. 
The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\napp.run(host='0.0.0.0', port=5001)\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\[email protected]('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n 
chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. 
The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\napp.run(host='0.0.0.0', port=5001)\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\[email protected]('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n 
chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/get_chain', methods=['GET'])\ndef get_chain():\n response = {'chain': blockchain.chain, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. 
The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. The puspesh Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\[email protected]('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n 
chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\n<function token>\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\[email protected]('/connect_node', methods=['POST'])\ndef connect_node():\n json = request.get_json()\n nodes = json.get('nodes')\n if nodes is None:\n return 'No node', 400\n for node in nodes:\n blockchain.add_node(node)\n response = {'message':\n 'All the nodes are now connected. 
The puspesh Blockchain now contains the following nodes:'\n , 'total_nodes': list(blockchain.nodes)}\n return jsonify(response), 201\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\[email protected]('/print_chain', methods=['GET'])\ndef print_chain():\n chain_till_now = []\n for xblock in blockchain.chain:\n xcurrent_hash = blockchain.hash(xblock)\n if len(xblock['farmer_details']) == 0:\n 
chain_till_now.append({'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n else:\n l = len(xblock['farmer_details'])\n sum = ''\n l -= 1\n while l >= 0:\n sum = xblock['farmer_details'][l]['hash_of_transaction'] + sum\n l -= 1\n chain_till_now.append({'Merged_hash': hashlib.sha256(sum.encode\n ()).hexdigest(), 'index': xblock['index'], 'timestamp':\n xblock['timestamp'], 'proof': xblock['proof'],\n 'previous_hash': xblock['previous_hash'], 'farmer': xblock[\n 'farmer_details'], 'current_hash': xcurrent_hash})\n response = {'chain': chain_till_now, 'length': len(blockchain.chain)}\n return jsonify(response), 200\n\n\n<function token>\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. 
The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\n<function token>\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n<function token>\n<function token>\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. 
The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\n<function token>\n\n\[email protected]('/replace_chain', methods=['GET'])\ndef replace_chain():\n is_chain_replaced = blockchain.replace_chain()\n if is_chain_replaced:\n response = {'message':\n 'The nodes had different chains so the chain was replaced by the longest one.'\n , 'new_chain': blockchain.chain}\n else:\n response = {'message': 'All good. The chain is the largest one.',\n 'actual_chain': blockchain.chain}\n return jsonify(response), 200\n\n\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n<function token>\n<function token>\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. 
The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\[email protected]('/add_farmerdetails', methods=['POST'])\ndef add_farmer_details():\n json = request.get_json()\n farmer_keys = ['name_of_farmer', 'crop_name', 'quantity_inkg', 'rate_perkg'\n ]\n if not all(key in json for key in farmer_keys):\n return 'Some elements of the farmer_details are missing', 400\n index = blockchain.add_farmerdetails(json['name_of_farmer'], json[\n 'crop_name'], json['quantity_inkg'], json['rate_perkg'])\n response = {'message': f'These details will be added to Block {index}'}\n return jsonify(response), 201\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n<function token>\n<function token>\n\n\[email protected]('/is_valid', methods=['GET'])\ndef is_valid():\n is_valid = blockchain.is_chain_valid(blockchain.chain)\n if is_valid:\n response = {'message': 'All good. 
The Blockchain is valid.'}\n else:\n response = {'message':\n 'Houston, we have a problem. The Blockchain is not valid.'}\n return jsonify(response), 200\n\n\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n\n\[email protected]('/mine_block', methods=['GET'])\ndef mine_block():\n previous_block = blockchain.get_previous_block()\n previous_proof = previous_block['proof']\n proof = blockchain.proof_of_work(previous_proof)\n previous_hash = blockchain.hash(previous_block)\n block = blockchain.create_block(proof, previous_hash)\n current_block = blockchain.get_previous_block()\n current_hash = blockchain.hash(current_block)\n response = {'message': 'Congratulations, you just mined a block!',\n 'index': block['index'], 'timestamp': block['timestamp'], 'proof':\n block['proof'], 'previous_hash': block['previous_hash'], 'farmer':\n block['farmer_details'], 'current_hash': current_hash}\n return jsonify(response), 200\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n\n def add_farmerdetails(self, name, crop_name, quantity, rate):\n privatekey = RSA.generate(1024)\n publickey = privatekey.publickey()\n hash_of_transaction = hashlib.sha256((hashlib.sha256(name.encode())\n .hexdigest() + hashlib.sha256(crop_name.encode()).hexdigest() +\n hashlib.sha256(str(quantity).encode()).hexdigest() + hashlib.\n sha256(str(rate).encode()).hexdigest()).encode()).hexdigest()\n data = int(hash_of_transaction, 16)\n signature = pow(data, privatekey.d, privatekey.n)\n 
self.farmer_details.append({'name_of_farmer': hashlib.sha256(name.\n encode()).hexdigest(), 'crop_name': hashlib.sha256(crop_name.\n encode()).hexdigest(), 'quantity_inkg': hashlib.sha256(str(\n quantity).encode()).hexdigest(), 'rate_perkg': hashlib.sha256(\n str(rate).encode()).hexdigest(), 'hash_of_transaction':\n hash_of_transaction, 'signature': signature})\n previous_block = self.get_previous_block()\n return previous_block['index'] + 1\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n <function token>\n\n def add_node(self, address):\n parsed_url = urlparse(address)\n self.nodes.add(parsed_url.netloc)\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if 
longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n\n def proof_of_work(self, previous_proof):\n new_proof = 1\n check_proof = False\n while check_proof is False:\n hash_operation = hashlib.sha256(str(new_proof ** 2 - \n previous_proof ** 2).encode()).hexdigest()\n if hash_operation[:4] == '0000':\n check_proof = True\n else:\n new_proof += 1\n return new_proof\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n <function token>\n <function token>\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment 
token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n\n def get_previous_block(self):\n return self.chain[-1]\n <function token>\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n <function token>\n <function token>\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n <function token>\n <function token>\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n\n def is_chain_valid(self, chain):\n previous_block = chain[0]\n block_index = 1\n while block_index < len(chain):\n block = chain[block_index]\n if block['previous_hash'] != self.hash(previous_block):\n return False\n previous_proof = previous_block['proof']\n proof = block['proof']\n hash_operation = hashlib.sha256(str(proof ** 2 - previous_proof **\n 2).encode()).hexdigest()\n if hash_operation[:4] != '0000':\n return False\n previous_block = block\n block_index += 1\n return True\n <function token>\n <function token>\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n\n def __init__(self):\n self.chain = []\n self.farmer_details = []\n self.create_block(proof=1, previous_hash='0')\n self.nodes = set()\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n <function token>\n <function token>\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n <function token>\n <function token>\n <function token>\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n <function token>\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n <function token>\n <function token>\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n <function token>\n <function token>\n <function token>\n\n def replace_chain(self):\n network = self.nodes\n longest_chain = None\n max_length = len(self.chain)\n for node in network:\n response = requests.get(f'http://{node}/get_chain')\n if response.status_code == 200:\n length = response.json()['length']\n chain = response.json()['chain']\n if length > max_length and self.is_chain_valid(chain):\n max_length = length\n longest_chain = chain\n if longest_chain:\n self.chain = longest_chain\n return True\n return False\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n <function token>\n\n def create_block(self, proof, previous_hash):\n block = {'index': len(self.chain) + 1, 'timestamp': str(datetime.\n datetime.now()), 'proof': proof, 'previous_hash': previous_hash,\n 'farmer_details': self.farmer_details}\n self.farmer_details = []\n self.chain.append(block)\n return block\n <function token>\n <function token>\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n <function token>\n <function token>\n <function token>\n <function token>\n\n def hash(self, block):\n encoded_block = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(encoded_block).hexdigest()\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass Blockchain:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<class token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,749 |
a1e563f94044ff7cd7e0e55542bc4ca2db81df28
|
#
# Author:: Noah Kantrowitz <[email protected]>
#
# Copyright 2014, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from fabric.api import task, roles
import pytest
from fabric_rundeck import visitor
def fixture_path(*path):
    """Return the path to a test fixture under this directory's ``data`` folder."""
    here = os.path.dirname(__file__)
    return os.path.join(here, 'data', *path)
class TestUnwrap(object):
    """Exercise visitor.unwrap: every combination of Fabric's ``task`` and
    ``roles`` decorators must unwrap back to the original callable."""

    @pytest.fixture
    def fn(self):
        """Provide a plain, undecorated function."""
        def fn():
            pass
        return fn

    def test_fn(self, fn):
        # No decoration at all: unwrap behaves as the identity.
        assert visitor.unwrap(fn) is fn

    def test_task(self, fn):
        wrapped = task(fn)
        assert visitor.unwrap(wrapped) is fn

    def test_taskcall(self, fn):
        wrapped = task()(fn)
        assert visitor.unwrap(wrapped) is fn

    def test_task_roles(self, fn):
        wrapped = task(roles('foo')(fn))
        assert visitor.unwrap(wrapped) is fn

    def test_taskcall_roles(self, fn):
        wrapped = task()(roles('foo')(fn))
        assert visitor.unwrap(wrapped) is fn

    def test_roles_task(self, fn):
        wrapped = roles('foo')(task(fn))
        assert visitor.unwrap(wrapped) is fn

    def test_roles_taskcall(self, fn):
        wrapped = roles('foo')(task()(fn))
        assert visitor.unwrap(wrapped) is fn

    def test_lambda(self):
        # Anonymous callables are deliberately created in lambda form here;
        # unwrap must leave them untouched.
        fn = lambda: None
        assert visitor.unwrap(fn) is fn

    def test_lambda_task(self):
        fn = lambda: None
        wrapped = task(fn)
        assert visitor.unwrap(wrapped) is fn
class TestVisitTask(object):
    """Check the metadata dict produced by visitor.visit_task for a range
    of function signatures."""

    @staticmethod
    def _expected(args, varargs=None, keywords=None, defaults=None, doc=None):
        """Build the metadata dict visit_task should emit for a task named ``fn``
        at the root path."""
        return {
            'name': 'fn',
            'path': (),
            'doc': doc,
            'cron': None,
            'argspec': {
                'args': args,
                'varargs': varargs,
                'keywords': keywords,
                'defaults': defaults,
            },
        }

    def test_no_args(self):
        def fn():
            pass
        assert visitor.visit_task(fn, ()) == self._expected(args=[])

    def test_simple_args(self):
        def fn(a, b):
            pass
        assert visitor.visit_task(fn, ()) == self._expected(args=['a', 'b'])

    def test_arg_defaults(self):
        def fn(a, b=1, c=None):
            pass
        # Only the defaulted parameters appear in 'defaults', in order.
        assert visitor.visit_task(fn, ()) == self._expected(
            args=['a', 'b', 'c'], defaults=(1, None))

    def test_varargs(self):
        def fn(*args, **kwargs):
            pass
        assert visitor.visit_task(fn, ()) == self._expected(
            args=[], varargs='args', keywords='kwargs')

    def test_docs(self):
        def fn(*args, **kwargs):
            """I am a teapot."""
            pass
        # The docstring is surfaced verbatim in the 'doc' field.
        assert visitor.visit_task(fn, ()) == self._expected(
            args=[], varargs='args', keywords='kwargs', doc='I am a teapot.')
class TestVisit(object):
    """Tests for visitor.visit over flat and nested callable mappings."""

    def test_single(self):
        def fn():
            pass
        data = visitor.visit({'fn': fn})
        assert len(data) == 1
        assert data[0]['name'] == 'fn'

    def test_multi(self):
        def fn():
            pass

        def fn2():
            pass

        def fn3():
            pass
        data = visitor.visit({'fn': fn, 'fn2': fn2, 'fn3': fn3})
        assert len(data) == 3
        assert [entry['name'] for entry in data] == ['fn', 'fn2', 'fn3']

    def test_nested(self):
        def fn():
            pass

        def fn2():
            pass

        def fn3():
            pass
        # fn2/fn3 live in a sub-mapping, which should surface as their 'path'.
        data = visitor.visit({'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}})
        assert len(data) == 3
        assert [(entry['name'], entry['path']) for entry in data] == [
            ('fn', ()),
            ('fn2', ('mod',)),
            ('fn3', ('mod',)),
        ]
class TestVisitFabfile(object):
    """End-to-end check: load and visit a real fabfile fixture from disk."""

    def test_one(self):
        tasks = visitor.visit_fabfile(fixture_path('fabfile_one.py'))
        assert len(tasks) == 3
|
[
"#\n# Author:: Noah Kantrowitz <[email protected]>\n#\n# Copyright 2014, Noah Kantrowitz\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\n\nfrom fabric.api import task, roles\nimport pytest\n\nfrom fabric_rundeck import visitor\n\n\ndef fixture_path(*path):\n return os.path.join(os.path.dirname(__file__), 'data', *path)\n\n\nclass TestUnwrap(object):\n @pytest.fixture\n def fn(self):\n def fn():\n pass\n return fn\n\n def test_fn(self, fn):\n assert visitor.unwrap(fn) is fn\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n def test_taskcall(self, fn):\n t = task()(fn)\n assert visitor.unwrap(t) is fn\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_taskcall(self, fn):\n t = roles('foo')(task()(fn))\n assert visitor.unwrap(t) is fn\n\n def test_lambda(self):\n fn = lambda: None\n assert visitor.unwrap(fn) is fn\n\n def test_lambda_task(self):\n fn = lambda: None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n def test_no_args(self):\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {\n 'name': 'fn',\n 'path': (),\n 'doc': None,\n 'cron': None,\n 'argspec': {\n 'args': [],\n 'varargs': None,\n 'keywords': None,\n 
'defaults': None,\n },\n }\n\n def test_simple_args(self):\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {\n 'name': 'fn',\n 'path': (),\n 'doc': None,\n 'cron': None,\n 'argspec': {\n 'args': ['a', 'b'],\n 'varargs': None,\n 'keywords': None,\n 'defaults': None,\n },\n }\n\n def test_arg_defaults(self):\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {\n 'name': 'fn',\n 'path': (),\n 'doc': None,\n 'cron': None,\n 'argspec': {\n 'args': ['a', 'b', 'c'],\n 'varargs': None,\n 'keywords': None,\n 'defaults': (1, None),\n },\n }\n\n def test_varargs(self):\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {\n 'name': 'fn',\n 'path': (),\n 'doc': None,\n 'cron': None,\n 'argspec': {\n 'args': [],\n 'varargs': 'args',\n 'keywords': 'kwargs',\n 'defaults': None,\n },\n }\n\n def test_docs(self):\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {\n 'name': 'fn',\n 'path': (),\n 'doc': 'I am a teapot.',\n 'cron': None,\n 'argspec': {\n 'args': [],\n 'varargs': 'args',\n 'keywords': 'kwargs',\n 'defaults': None,\n },\n }\n\n\nclass TestVisit(object):\n def test_single(self):\n def fn():\n pass\n callables = {\n 'fn': fn,\n }\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n def fn():\n pass\n def fn2():\n pass\n def fn3():\n pass\n callables = {\n 'fn': fn,\n 'fn2': fn2,\n 'fn3': fn3,\n }\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n def fn():\n pass\n def fn2():\n pass\n def fn3():\n pass\n callables = {\n 'fn': fn,\n 'mod': {\n 'fn2': fn2,\n 'fn3': fn3,\n }\n }\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == 
('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"import os\nfrom fabric.api import task, roles\nimport pytest\nfrom fabric_rundeck import visitor\n\n\ndef fixture_path(*path):\n return os.path.join(os.path.dirname(__file__), 'data', *path)\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n\n def test_fn(self, fn):\n assert visitor.unwrap(fn) is fn\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n def test_taskcall(self, fn):\n t = task()(fn)\n assert visitor.unwrap(t) is fn\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_taskcall(self, fn):\n t = roles('foo')(task()(fn))\n assert visitor.unwrap(t) is fn\n\n def test_lambda(self):\n fn = lambda : None\n assert visitor.unwrap(fn) is fn\n\n def test_lambda_task(self):\n fn = lambda : None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 
'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n\n\ndef fixture_path(*path):\n return os.path.join(os.path.dirname(__file__), 'data', *path)\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n\n def test_fn(self, fn):\n assert visitor.unwrap(fn) is fn\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n def test_taskcall(self, fn):\n t = task()(fn)\n assert visitor.unwrap(t) is fn\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_taskcall(self, fn):\n t = roles('foo')(task()(fn))\n assert visitor.unwrap(t) is fn\n\n def test_lambda(self):\n fn = lambda : None\n assert visitor.unwrap(fn) is fn\n\n def test_lambda_task(self):\n fn = lambda : None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 
'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n\n def test_fn(self, fn):\n assert visitor.unwrap(fn) is fn\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n def test_taskcall(self, fn):\n t = task()(fn)\n assert visitor.unwrap(t) is fn\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_taskcall(self, fn):\n t = roles('foo')(task()(fn))\n assert visitor.unwrap(t) is fn\n\n def test_lambda(self):\n fn = lambda : None\n assert visitor.unwrap(fn) is fn\n\n def test_lambda_task(self):\n fn = lambda : None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n 
\"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n\n def test_fn(self, fn):\n assert visitor.unwrap(fn) is fn\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n def test_taskcall(self, fn):\n t = task()(fn)\n assert visitor.unwrap(t) is fn\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_taskcall(self, fn):\n t = roles('foo')(task()(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n\n def test_lambda_task(self):\n fn = lambda : None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert 
visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n\n def test_fn(self, fn):\n assert visitor.unwrap(fn) is fn\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n <function token>\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_taskcall(self, fn):\n t = roles('foo')(task()(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n\n def test_lambda_task(self):\n fn = lambda : None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 
'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n\n def test_fn(self, fn):\n assert visitor.unwrap(fn) is fn\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n <function token>\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n\n def test_lambda_task(self):\n fn = lambda : None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 
'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n <function token>\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n <function token>\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n\n def test_lambda_task(self):\n fn = lambda : None\n t = task(fn)\n assert visitor.unwrap(t) is fn\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass 
TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n <function token>\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n <function token>\n\n def test_task_roles(self, fn):\n t = task(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data 
= visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n <function token>\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n\n def test_taskcall_roles(self, fn):\n t = task()(roles('foo')(fn))\n assert visitor.unwrap(t) is fn\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] 
== 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n <function token>\n\n def test_task(self, fn):\n t = task(fn)\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n <function token>\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def 
fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def test_roles_task(self, fn):\n t = roles('foo')(task(fn))\n assert visitor.unwrap(t) is fn\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': 
fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n\n @pytest.fixture\n def fn(self):\n\n def fn():\n pass\n return fn\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert 
data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n\n\nclass TestUnwrap(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 
'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n\n def test_arg_defaults(self):\n\n def fn(a, b=1, c=None):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b', 'c'],\n 'varargs': None, 'keywords': None, 'defaults': (1, None)}}\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = 
visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n\n\nclass TestVisitTask(object):\n\n def test_no_args(self):\n\n def fn():\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n None, 'keywords': None, 'defaults': None}}\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n <function token>\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == 
('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n\n\nclass TestVisitTask(object):\n <function token>\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n <function token>\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n def test_docs(self):\n\n def fn(*args, **kwargs):\n \"\"\"I am a teapot.\"\"\"\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': 'I am a teapot.', 'cron': None, 'argspec': {'args': [],\n 'varargs': 'args', 'keywords': 'kwargs', 'defaults': None}}\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n\n\nclass TestVisitTask(object):\n <function token>\n\n def test_simple_args(self):\n\n def fn(a, b):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': ['a', 'b'],\n 'varargs': None, 'keywords': None, 'defaults': None}}\n <function token>\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n <function token>\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n\n\nclass TestVisitTask(object):\n <function token>\n <function token>\n <function token>\n\n def test_varargs(self):\n\n def fn(*args, **kwargs):\n pass\n assert visitor.visit_task(fn, ()) == {'name': 'fn', 'path': (),\n 'doc': None, 'cron': None, 'argspec': {'args': [], 'varargs':\n 'args', 'keywords': 'kwargs', 'defaults': None}}\n <function token>\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n\n\nclass TestVisitTask(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n\n def test_multi(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'fn2': fn2, 'fn3': fn3}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[1]['name'] == 'fn2'\n assert data[2]['name'] == 'fn3'\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n <function token>\n\n def test_nested(self):\n\n def fn():\n pass\n\n def fn2():\n pass\n\n def fn3():\n pass\n callables = {'fn': fn, 'mod': {'fn2': fn2, 'fn3': fn3}}\n data = visitor.visit(callables)\n assert len(data) == 3\n assert data[0]['name'] == 'fn'\n assert data[0]['path'] == ()\n assert data[1]['name'] == 'fn2'\n assert data[1]['path'] == ('mod',)\n assert data[2]['name'] == 'fn3'\n assert data[2]['path'] == ('mod',)\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestVisit(object):\n\n def test_single(self):\n\n def fn():\n pass\n callables = {'fn': fn}\n data = visitor.visit(callables)\n assert len(data) == 1\n assert data[0]['name'] == 'fn'\n <function token>\n <function token>\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n<class token>\n\n\nclass TestVisit(object):\n <function token>\n <function token>\n <function token>\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestVisitFabfile(object):\n\n def test_one(self):\n data = visitor.visit_fabfile(fixture_path('fabfile_one.py'))\n assert len(data) == 3\n",
"<import token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestVisitFabfile(object):\n <function token>\n",
"<import token>\n<function token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
9,750 |
084299da1c2f41de96e60d37088466c7b61de38e
|
from appJar import gui
# Build the main window: title "Calculator", sized 560x240 pixels.
app = gui("Calculator", "560x240")
### FUNCTIONS ###
# Shared calculator state: the two operands (n1, n2), the last computed
# result, whether the next captured entry is the first operand, and the
# one-letter code of the selected operation ('a'/'s'/'m'/'d').
n1, n2 = 0.0, 0.0
result = 0.0
isFirst = True
calc = ""
def doMath(btn):
    """Capture the current entry as an operand and remember the chosen operation."""
    global result, n1, n2, isFirst, calc
    inputNumber()
    # Map the pressed button's label to its one-letter operation code.
    op_codes = {"Add": "a", "Substract": "s", "Multiply": "m", "Divide": "d"}
    if btn in op_codes:
        calc = op_codes[btn]
    app.clearEntry("Number")
def calculate(btn):
    """Apply the stored operation to the two captured operands and display the result."""
    global result, n1, n2, isFirst, calc
    inputNumber()
    if calc == 'a':
        result = n1 + n2
    elif calc == 's':
        result = n1 - n2
    elif calc == 'm':
        result = n1 * n2
    elif calc == 'd':
        try:
            result = n1 / n2
        except ZeroDivisionError:
            # Reset state first so the label below shows 0.0, then warn.
            clearOut(btn)
            app.errorBox("DivisionByZero", "You can't divide by Zero.")
    app.clearEntry("Number")
    app.setLabel("Result", result)
def clearOut(btn):
    """Reset every piece of calculator state back to its initial value."""
    global result, n1, n2, isFirst, calc
    n1 = 0.0
    n2 = 0.0
    result = 0.0
    isFirst = True
    calc = ""
def inputNumber():
    """Read the entry field into n1 or n2, alternating between them on each call."""
    global n1, n2, isFirst
    value = app.getEntry("Number")
    if isFirst:
        n1 = value
    else:
        n2 = value
    isFirst = not isFirst
### FUNCTIONS ###
# ---- UI layout ----
app.setStretch("column")
app.setSticky("")
app.setResizable(True)
# Single numeric entry shared by both operands.
app.addNumericEntry("Number")
app.setEntryDefault("Number", "Enter Number")
# All four operation buttons route to doMath; the button label selects the op.
app.addButtons(["Add", "Substract", "Multiply", "Divide"], doMath)
app.addButtons(["Calculate!", "clearOut"], [calculate, clearOut])
# Show "C" on the clear button instead of its internal widget name.
app.setButton("clearOut", "C")
app.addEmptyLabel("Result")
# Enter the GUI event loop (blocks until the window closes).
app.go()
|
[
"from appJar import gui\n\napp = gui(\"Calculator\", \"560x240\")\n\n### FUNCTIONS ###\n\nn1, n2 = 0.0, 0.0\nresult = 0.0\nisFirst = True\ncalc = \"\"\n\ndef doMath(btn):\n global result, n1, n2, isFirst, calc\n\n inputNumber()\n\n if(btn == \"Add\"): calc = \"a\"\n if(btn == \"Substract\"): calc = \"s\"\n if(btn == \"Multiply\"): calc = \"m\"\n if(btn == \"Divide\"): calc = \"d\"\n\n app.clearEntry(\"Number\")\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n\n inputNumber()\n\n if(calc == 'a'): result = n1 + n2\n if(calc == 's'): result = n1 - n2\n if(calc == 'm'): result = n1 * n2\n if(calc == 'd'):\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox(\"DivisionByZero\", \"You can't divide by Zero.\")\n\n app.clearEntry(\"Number\")\n app.setLabel(\"Result\", result)\n\ndef clearOut(btn):\n global result, n1, n2, isFirst, calc\n n1, n2 = 0.0, 0.0\n result = 0.0\n isFirst = True\n calc = \"\"\n\ndef inputNumber():\n global n1, n2, isFirst\n\n if(isFirst):\n n1 = app.getEntry(\"Number\")\n isFirst = False\n else:\n n2 = app.getEntry(\"Number\")\n isFirst = True\n\n\n### FUNCTIONS ###\n\napp.setStretch(\"column\")\napp.setSticky(\"\")\napp.setResizable(True)\napp.addNumericEntry(\"Number\")\napp.setEntryDefault(\"Number\", \"Enter Number\")\n\napp.addButtons([\"Add\", \"Substract\", \"Multiply\", \"Divide\"], doMath)\napp.addButtons([\"Calculate!\", \"clearOut\"], [calculate, clearOut])\napp.setButton(\"clearOut\", \"C\")\n\napp.addEmptyLabel(\"Result\")\n\napp.go()\n",
"from appJar import gui\napp = gui('Calculator', '560x240')\nn1, n2 = 0.0, 0.0\nresult = 0.0\nisFirst = True\ncalc = ''\n\n\ndef doMath(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if btn == 'Add':\n calc = 'a'\n if btn == 'Substract':\n calc = 's'\n if btn == 'Multiply':\n calc = 'm'\n if btn == 'Divide':\n calc = 'd'\n app.clearEntry('Number')\n\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if calc == 'a':\n result = n1 + n2\n if calc == 's':\n result = n1 - n2\n if calc == 'm':\n result = n1 * n2\n if calc == 'd':\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox('DivisionByZero', \"You can't divide by Zero.\")\n app.clearEntry('Number')\n app.setLabel('Result', result)\n\n\ndef clearOut(btn):\n global result, n1, n2, isFirst, calc\n n1, n2 = 0.0, 0.0\n result = 0.0\n isFirst = True\n calc = ''\n\n\ndef inputNumber():\n global n1, n2, isFirst\n if isFirst:\n n1 = app.getEntry('Number')\n isFirst = False\n else:\n n2 = app.getEntry('Number')\n isFirst = True\n\n\napp.setStretch('column')\napp.setSticky('')\napp.setResizable(True)\napp.addNumericEntry('Number')\napp.setEntryDefault('Number', 'Enter Number')\napp.addButtons(['Add', 'Substract', 'Multiply', 'Divide'], doMath)\napp.addButtons(['Calculate!', 'clearOut'], [calculate, clearOut])\napp.setButton('clearOut', 'C')\napp.addEmptyLabel('Result')\napp.go()\n",
"<import token>\napp = gui('Calculator', '560x240')\nn1, n2 = 0.0, 0.0\nresult = 0.0\nisFirst = True\ncalc = ''\n\n\ndef doMath(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if btn == 'Add':\n calc = 'a'\n if btn == 'Substract':\n calc = 's'\n if btn == 'Multiply':\n calc = 'm'\n if btn == 'Divide':\n calc = 'd'\n app.clearEntry('Number')\n\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if calc == 'a':\n result = n1 + n2\n if calc == 's':\n result = n1 - n2\n if calc == 'm':\n result = n1 * n2\n if calc == 'd':\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox('DivisionByZero', \"You can't divide by Zero.\")\n app.clearEntry('Number')\n app.setLabel('Result', result)\n\n\ndef clearOut(btn):\n global result, n1, n2, isFirst, calc\n n1, n2 = 0.0, 0.0\n result = 0.0\n isFirst = True\n calc = ''\n\n\ndef inputNumber():\n global n1, n2, isFirst\n if isFirst:\n n1 = app.getEntry('Number')\n isFirst = False\n else:\n n2 = app.getEntry('Number')\n isFirst = True\n\n\napp.setStretch('column')\napp.setSticky('')\napp.setResizable(True)\napp.addNumericEntry('Number')\napp.setEntryDefault('Number', 'Enter Number')\napp.addButtons(['Add', 'Substract', 'Multiply', 'Divide'], doMath)\napp.addButtons(['Calculate!', 'clearOut'], [calculate, clearOut])\napp.setButton('clearOut', 'C')\napp.addEmptyLabel('Result')\napp.go()\n",
"<import token>\n<assignment token>\n\n\ndef doMath(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if btn == 'Add':\n calc = 'a'\n if btn == 'Substract':\n calc = 's'\n if btn == 'Multiply':\n calc = 'm'\n if btn == 'Divide':\n calc = 'd'\n app.clearEntry('Number')\n\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if calc == 'a':\n result = n1 + n2\n if calc == 's':\n result = n1 - n2\n if calc == 'm':\n result = n1 * n2\n if calc == 'd':\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox('DivisionByZero', \"You can't divide by Zero.\")\n app.clearEntry('Number')\n app.setLabel('Result', result)\n\n\ndef clearOut(btn):\n global result, n1, n2, isFirst, calc\n n1, n2 = 0.0, 0.0\n result = 0.0\n isFirst = True\n calc = ''\n\n\ndef inputNumber():\n global n1, n2, isFirst\n if isFirst:\n n1 = app.getEntry('Number')\n isFirst = False\n else:\n n2 = app.getEntry('Number')\n isFirst = True\n\n\napp.setStretch('column')\napp.setSticky('')\napp.setResizable(True)\napp.addNumericEntry('Number')\napp.setEntryDefault('Number', 'Enter Number')\napp.addButtons(['Add', 'Substract', 'Multiply', 'Divide'], doMath)\napp.addButtons(['Calculate!', 'clearOut'], [calculate, clearOut])\napp.setButton('clearOut', 'C')\napp.addEmptyLabel('Result')\napp.go()\n",
"<import token>\n<assignment token>\n\n\ndef doMath(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if btn == 'Add':\n calc = 'a'\n if btn == 'Substract':\n calc = 's'\n if btn == 'Multiply':\n calc = 'm'\n if btn == 'Divide':\n calc = 'd'\n app.clearEntry('Number')\n\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if calc == 'a':\n result = n1 + n2\n if calc == 's':\n result = n1 - n2\n if calc == 'm':\n result = n1 * n2\n if calc == 'd':\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox('DivisionByZero', \"You can't divide by Zero.\")\n app.clearEntry('Number')\n app.setLabel('Result', result)\n\n\ndef clearOut(btn):\n global result, n1, n2, isFirst, calc\n n1, n2 = 0.0, 0.0\n result = 0.0\n isFirst = True\n calc = ''\n\n\ndef inputNumber():\n global n1, n2, isFirst\n if isFirst:\n n1 = app.getEntry('Number')\n isFirst = False\n else:\n n2 = app.getEntry('Number')\n isFirst = True\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef doMath(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if btn == 'Add':\n calc = 'a'\n if btn == 'Substract':\n calc = 's'\n if btn == 'Multiply':\n calc = 'm'\n if btn == 'Divide':\n calc = 'd'\n app.clearEntry('Number')\n\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if calc == 'a':\n result = n1 + n2\n if calc == 's':\n result = n1 - n2\n if calc == 'm':\n result = n1 * n2\n if calc == 'd':\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox('DivisionByZero', \"You can't divide by Zero.\")\n app.clearEntry('Number')\n app.setLabel('Result', result)\n\n\ndef clearOut(btn):\n global result, n1, n2, isFirst, calc\n n1, n2 = 0.0, 0.0\n result = 0.0\n isFirst = True\n calc = ''\n\n\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef doMath(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if btn == 'Add':\n calc = 'a'\n if btn == 'Substract':\n calc = 's'\n if btn == 'Multiply':\n calc = 'm'\n if btn == 'Divide':\n calc = 'd'\n app.clearEntry('Number')\n\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if calc == 'a':\n result = n1 + n2\n if calc == 's':\n result = n1 - n2\n if calc == 'm':\n result = n1 * n2\n if calc == 'd':\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox('DivisionByZero', \"You can't divide by Zero.\")\n app.clearEntry('Number')\n app.setLabel('Result', result)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\ndef calculate(btn):\n global result, n1, n2, isFirst, calc\n inputNumber()\n if calc == 'a':\n result = n1 + n2\n if calc == 's':\n result = n1 - n2\n if calc == 'm':\n result = n1 * n2\n if calc == 'd':\n try:\n result = n1 / n2\n except ZeroDivisionError:\n clearOut(btn)\n app.errorBox('DivisionByZero', \"You can't divide by Zero.\")\n app.clearEntry('Number')\n app.setLabel('Result', result)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,751 |
3319614d154b16190f3cd8f4f65c3b0e0da277e9
|
# -*- coding: utf-8 -*-
class Solution:
"""
@param head: The first node of the linked list.
@return: The node where the cycle begins.
if there is no cycle, return null
"""
def detectCycle(self, head):
# write your code here
# 先确定是否有环,然后确定环的大小,再遍历确定位置。
cycle_len = -1
one_node, two_node = head, head
while two_node:
for i in xrange(2):
if two_node:
two_node = two_node.next
if two_node == one_node:
cycle_len = 1
two_node = one_node.next
while two_node != one_node: # 算出环的长度
cycle_len += 1
two_node = two_node.next
break
else:
break
one_node = one_node.next
if (not two_node) or (cycle_len != -1):
break
if cycle_len == -1:
return None
one_node, two_node = head, head # two_node先前进的距离等于环的长度
i = 0
while i < cycle_len:
two_node = two_node.next
i += 1
while one_node != two_node:
one_node = one_node.next
two_node = two_node.next
return one_node
|
[
"# -*- coding: utf-8 -*-\n\nclass Solution:\n \"\"\"\n @param head: The first node of the linked list.\n @return: The node where the cycle begins. \n if there is no cycle, return null\n \"\"\"\n def detectCycle(self, head):\n # write your code here\n # 先确定是否有环,然后确定环的大小,再遍历确定位置。\n cycle_len = -1\n one_node, two_node = head, head\n while two_node:\n for i in xrange(2):\n if two_node:\n two_node = two_node.next\n if two_node == one_node:\n cycle_len = 1\n two_node = one_node.next\n while two_node != one_node: # 算出环的长度\n cycle_len += 1\n two_node = two_node.next\n break\n else:\n break\n one_node = one_node.next\n if (not two_node) or (cycle_len != -1):\n break\n if cycle_len == -1:\n return None\n one_node, two_node = head, head # two_node先前进的距离等于环的长度\n i = 0\n while i < cycle_len:\n two_node = two_node.next\n i += 1\n while one_node != two_node:\n one_node = one_node.next\n two_node = two_node.next\n return one_node",
"class Solution:\n \"\"\"\n @param head: The first node of the linked list.\n @return: The node where the cycle begins. \n if there is no cycle, return null\n \"\"\"\n\n def detectCycle(self, head):\n cycle_len = -1\n one_node, two_node = head, head\n while two_node:\n for i in xrange(2):\n if two_node:\n two_node = two_node.next\n if two_node == one_node:\n cycle_len = 1\n two_node = one_node.next\n while two_node != one_node:\n cycle_len += 1\n two_node = two_node.next\n break\n else:\n break\n one_node = one_node.next\n if not two_node or cycle_len != -1:\n break\n if cycle_len == -1:\n return None\n one_node, two_node = head, head\n i = 0\n while i < cycle_len:\n two_node = two_node.next\n i += 1\n while one_node != two_node:\n one_node = one_node.next\n two_node = two_node.next\n return one_node\n",
"class Solution:\n <docstring token>\n\n def detectCycle(self, head):\n cycle_len = -1\n one_node, two_node = head, head\n while two_node:\n for i in xrange(2):\n if two_node:\n two_node = two_node.next\n if two_node == one_node:\n cycle_len = 1\n two_node = one_node.next\n while two_node != one_node:\n cycle_len += 1\n two_node = two_node.next\n break\n else:\n break\n one_node = one_node.next\n if not two_node or cycle_len != -1:\n break\n if cycle_len == -1:\n return None\n one_node, two_node = head, head\n i = 0\n while i < cycle_len:\n two_node = two_node.next\n i += 1\n while one_node != two_node:\n one_node = one_node.next\n two_node = two_node.next\n return one_node\n",
"class Solution:\n <docstring token>\n <function token>\n",
"<class token>\n"
] | false |
9,752 |
b93f6c3192f8dd58b96dfdc6ea2b17e12cce34d0
|
from collections import defaultdict, deque
N = int(input())
adj_list = defaultdict(list)
E = []
V_number = [None]*N
for _ in range(N-1):
a, b = map(int, input().split())
E.append((a, b))
adj_list[a].append(b)
adj_list[b].append(a)
C = sorted(list(map(int, input().split())), reverse=True)
q = deque([1])
i = 0
while q:
v = q.popleft()
V_number[v-1] = C[i]
i += 1
for u in adj_list[v]:
if V_number[u-1] is None:
q.append(u)
print(sum(C[1:]))
print(*V_number)
|
[
"from collections import defaultdict, deque\n\nN = int(input())\nadj_list = defaultdict(list)\nE = []\nV_number = [None]*N\nfor _ in range(N-1):\n a, b = map(int, input().split())\n E.append((a, b))\n adj_list[a].append(b)\n adj_list[b].append(a)\nC = sorted(list(map(int, input().split())), reverse=True)\nq = deque([1])\ni = 0\nwhile q:\n v = q.popleft()\n V_number[v-1] = C[i]\n i += 1\n for u in adj_list[v]:\n if V_number[u-1] is None:\n q.append(u)\n\nprint(sum(C[1:]))\nprint(*V_number)",
"from collections import defaultdict, deque\nN = int(input())\nadj_list = defaultdict(list)\nE = []\nV_number = [None] * N\nfor _ in range(N - 1):\n a, b = map(int, input().split())\n E.append((a, b))\n adj_list[a].append(b)\n adj_list[b].append(a)\nC = sorted(list(map(int, input().split())), reverse=True)\nq = deque([1])\ni = 0\nwhile q:\n v = q.popleft()\n V_number[v - 1] = C[i]\n i += 1\n for u in adj_list[v]:\n if V_number[u - 1] is None:\n q.append(u)\nprint(sum(C[1:]))\nprint(*V_number)\n",
"<import token>\nN = int(input())\nadj_list = defaultdict(list)\nE = []\nV_number = [None] * N\nfor _ in range(N - 1):\n a, b = map(int, input().split())\n E.append((a, b))\n adj_list[a].append(b)\n adj_list[b].append(a)\nC = sorted(list(map(int, input().split())), reverse=True)\nq = deque([1])\ni = 0\nwhile q:\n v = q.popleft()\n V_number[v - 1] = C[i]\n i += 1\n for u in adj_list[v]:\n if V_number[u - 1] is None:\n q.append(u)\nprint(sum(C[1:]))\nprint(*V_number)\n",
"<import token>\n<assignment token>\nfor _ in range(N - 1):\n a, b = map(int, input().split())\n E.append((a, b))\n adj_list[a].append(b)\n adj_list[b].append(a)\n<assignment token>\nwhile q:\n v = q.popleft()\n V_number[v - 1] = C[i]\n i += 1\n for u in adj_list[v]:\n if V_number[u - 1] is None:\n q.append(u)\nprint(sum(C[1:]))\nprint(*V_number)\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,753 |
9535335c70129f997d7b8739444a503d0b984ac8
|
import json
import os
import pickle
import random
import urllib.request
from pathlib import Path
import tensorflow as tf
from matplotlib import pyplot as plt
class CNN(object):
def __init__(self):
self.model = tf.keras.Sequential([
tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 1)),
tf.keras.layers.MaxPool2D((2, 2)),
tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
tf.keras.layers.MaxPool2D(2, 2),
tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
tf.keras.layers.MaxPool2D(2, 2),
tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
tf.keras.layers.MaxPool2D(2, 2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(512, activation='relu'),
tf.keras.layers.Dense(1, activation='sigmoid')
])
self.last_training_history = {}
def print_model_info(self):
print(self.model.summary())
def get_model(self):
return self.model
def load_weights(self, filepath='model.h5'):
self.model.load_weights(filepath)
self.model.compile(
optimizer='adam',
loss='binary_crossentropy',
metrics=['acc']
)
def load_last_training_history(self, filepath='result.pk'):
with open(filepath, 'rb') as f:
self.last_training_history = pickle.load(f)
def get_last_training_history(self):
return self.last_training_history
def plot_last_training_history(self, save_plot=False):
for key in self.last_training_history:
y = self.last_training_history[key]
plt.plot([i + 1 for i in range(len(y))], y, label=key)
plt.legend()
plt.grid()
plt.xlabel('epoch')
if save_plot:
plt.savefig('training_history.png', dpi=300)
else:
plt.show()
def train(self, directory, epochs=100, save_model=False, save_history=False):
train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255,
rotation_range=20,
width_shift_range=0.15,
height_shift_range=0.15,
shear_range=0.15,
zoom_range=0.15,
fill_mode='nearest',
horizontal_flip=True,
vertical_flip=False,
brightness_range=None,
channel_shift_range=0
)
test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255)
train_generator = train_datagen.flow_from_directory(
directory,
target_size=(150, 150),
batch_size=32,
color_mode='grayscale',
class_mode='binary'
)
test_generator = test_datagen.flow_from_directory(
directory,
target_size=(150, 150),
batch_size=32,
color_mode='grayscale',
class_mode='binary'
)
self.model.compile(
optimizer='adam',
loss='binary_crossentropy',
metrics=['acc']
)
history = self.model.fit(
train_generator,
epochs=epochs,
validation_data=test_generator
)
if save_model:
self.model.save('model.h5')
if save_history:
with open('result.pk', 'wb') as f:
pickle.dump(history.history, f)
self.last_training_history = history.history
return history.history
def predict_directory(self, directory, probabilities=True):
if directory[-1] != '\\' and directory[-1] != '/':
directory += '/'
predictions = {}
onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]
for image_file in onlyfiles:
img = tf.keras.preprocessing.image.load_img(directory + image_file, target_size=(150, 150),
color_mode='grayscale')
x = tf.keras.preprocessing.image.img_to_array(img, )
x = x.reshape((1,) + x.shape)
x = x / 255
y = self.model.predict(x)[0][0]
if probabilities:
predictions[image_file] = y
else:
predictions[image_file] = y > 0.5
return predictions
def predict_single_image(self, file_url):
self.load_weights()
self.load_last_training_history()
file_name = "image.jpg"
urllib.request.urlretrieve(file_url, file_name)
img = tf.keras.preprocessing.image.load_img(file_name, target_size=(150, 150),
color_mode='grayscale')
x = tf.keras.preprocessing.image.img_to_array(img, )
x = x.reshape((1,) + x.shape)
x = x / 255
prediction = self.model.predict(x)[0][0]
is_default_image = prediction < 0.5
print(prediction)
os.remove(file_name)
return json.dumps(True) if is_default_image else json.dumps(False)
def evaluate_on_directory(self, directory):
val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1. / 255)
val_generator = val_datagen.flow_from_directory(
directory,
target_size=(150, 150),
batch_size=32,
color_mode='grayscale',
class_mode='binary'
)
return self.model.evaluate(val_generator)
def split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):
assert train_size + test_size + val_size == 1
assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1
subdirs = next(os.walk(directory))[1]
if train_size > 0:
os.mkdir(directory + '/train')
for subdir in subdirs:
os.mkdir(directory + '/train/' + subdir)
if test_size > 0:
os.mkdir(directory + '/test')
for subdir in subdirs:
os.mkdir(directory + '/test/' + subdir)
if val_size > 0:
os.mkdir(directory + '/val')
for subdir in subdirs:
os.mkdir(directory + '/val/' + subdir)
pathlist = Path(directory).rglob('*.*')
for path in pathlist:
instance_path = str(path)
instance_properties = instance_path.split('/') if '/' in instance_path else instance_path.split('\\')
instance_name = instance_properties[-1]
instance_class = instance_properties[-2]
r = random.random()
if r < val_size:
subfolder = '/val/'
elif r < test_size + val_size:
subfolder = '/test/'
else:
subfolder = '/train/'
os.rename(instance_path, '/'.join(instance_properties[:-2]) + subfolder + instance_class + '/' + instance_name)
if __name__ == '__main__':
cnn = CNN()
cnn.load_weights()
cnn.load_last_training_history()
cnn.print_model_info()
|
[
"import json\nimport os\nimport pickle\nimport random\nimport urllib.request\nfrom pathlib import Path\n\nimport tensorflow as tf\nfrom matplotlib import pyplot as plt\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([\n tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 1)),\n tf.keras.layers.MaxPool2D((2, 2)),\n tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),\n tf.keras.layers.MaxPool2D(2, 2),\n tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),\n tf.keras.layers.MaxPool2D(2, 2),\n tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),\n tf.keras.layers.MaxPool2D(2, 2),\n tf.keras.layers.Flatten(),\n tf.keras.layers.Dropout(0.5),\n tf.keras.layers.Dense(512, activation='relu'),\n tf.keras.layers.Dense(1, activation='sigmoid')\n ])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(\n optimizer='adam',\n loss='binary_crossentropy',\n metrics=['acc']\n )\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([i + 1 for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(\n rescale=1. 
/ 255,\n rotation_range=20,\n width_shift_range=0.15,\n height_shift_range=0.15,\n shear_range=0.15,\n zoom_range=0.15,\n fill_mode='nearest',\n horizontal_flip=True,\n vertical_flip=False,\n brightness_range=None,\n channel_shift_range=0\n )\n\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(\n rescale=1. / 255) \n\n train_generator = train_datagen.flow_from_directory(\n directory,\n target_size=(150, 150),\n batch_size=32,\n color_mode='grayscale',\n class_mode='binary'\n )\n\n test_generator = test_datagen.flow_from_directory(\n directory,\n target_size=(150, 150),\n batch_size=32,\n color_mode='grayscale',\n class_mode='binary'\n )\n\n self.model.compile(\n optimizer='adam',\n loss='binary_crossentropy',\n metrics=['acc']\n )\n\n history = self.model.fit(\n train_generator,\n epochs=epochs,\n validation_data=test_generator\n )\n\n if save_model:\n self.model.save('model.h5')\n\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n\n self.last_training_history = history.history\n\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory + image_file, target_size=(150, 150),\n color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img, )\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = \"image.jpg\"\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=(150, 150),\n 
color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img, )\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1. / 255)\n val_generator = val_datagen.flow_from_directory(\n directory,\n target_size=(150, 150),\n batch_size=32,\n color_mode='grayscale',\n class_mode='binary'\n )\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/') if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) + subfolder + instance_class + '/' + instance_name)\n\n\nif __name__ == '__main__':\n\n cnn = CNN()\n cnn.load_weights()\n cnn.load_last_training_history()\n cnn.print_model_info()\n",
"import json\nimport os\nimport pickle\nimport random\nimport urllib.request\nfrom pathlib import Path\nimport tensorflow as tf\nfrom matplotlib import pyplot as plt\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, 
vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n 
os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=\n 1.0 / 255)\n val_generator = val_datagen.flow_from_directory(directory, target_size=\n (150, 150), batch_size=32, color_mode='grayscale', class_mode='binary')\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\nif __name__ == '__main__':\n cnn = CNN()\n cnn.load_weights()\n cnn.load_last_training_history()\n cnn.print_model_info()\n",
"<import token>\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 
255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = 
tf.keras.preprocessing.image.ImageDataGenerator(rescale=\n 1.0 / 255)\n val_generator = val_datagen.flow_from_directory(directory, target_size=\n (150, 150), batch_size=32, color_mode='grayscale', class_mode='binary')\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\nif __name__ == '__main__':\n cnn = CNN()\n cnn.load_weights()\n cnn.load_last_training_history()\n cnn.print_model_info()\n",
"<import token>\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 
255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\ndef evaluate_on_directory(self, directory):\n val_datagen = 
tf.keras.preprocessing.image.ImageDataGenerator(rescale=\n 1.0 / 255)\n val_generator = val_datagen.flow_from_directory(directory, target_size=\n (150, 150), batch_size=32, color_mode='grayscale', class_mode='binary')\n return self.model.evaluate(val_generator)\n\n\ndef split_directory(directory, train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 
255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n\n\ndef split_directory(directory, 
train_size=0.75, test_size=0.2, val_size=0.05):\n assert train_size + test_size + val_size == 1\n assert 0 <= train_size <= 1 and 0 <= test_size <= 1 and 0 <= val_size <= 1\n subdirs = next(os.walk(directory))[1]\n if train_size > 0:\n os.mkdir(directory + '/train')\n for subdir in subdirs:\n os.mkdir(directory + '/train/' + subdir)\n if test_size > 0:\n os.mkdir(directory + '/test')\n for subdir in subdirs:\n os.mkdir(directory + '/test/' + subdir)\n if val_size > 0:\n os.mkdir(directory + '/val')\n for subdir in subdirs:\n os.mkdir(directory + '/val/' + subdir)\n pathlist = Path(directory).rglob('*.*')\n for path in pathlist:\n instance_path = str(path)\n instance_properties = instance_path.split('/'\n ) if '/' in instance_path else instance_path.split('\\\\')\n instance_name = instance_properties[-1]\n instance_class = instance_properties[-2]\n r = random.random()\n if r < val_size:\n subfolder = '/val/'\n elif r < test_size + val_size:\n subfolder = '/test/'\n else:\n subfolder = '/train/'\n os.rename(instance_path, '/'.join(instance_properties[:-2]) +\n subfolder + instance_class + '/' + instance_name)\n\n\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n\n def __init__(self):\n self.model = tf.keras.Sequential([tf.keras.layers.Conv2D(32, (3, 3),\n activation='relu', input_shape=(150, 150, 1)), tf.keras.layers.\n MaxPool2D((2, 2)), tf.keras.layers.Conv2D(64, (3, 3),\n activation='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.\n layers.Conv2D(128, (3, 3), activation='relu'), tf.keras.layers.\n MaxPool2D(2, 2), tf.keras.layers.Conv2D(128, (3, 3), activation\n ='relu'), tf.keras.layers.MaxPool2D(2, 2), tf.keras.layers.\n Flatten(), tf.keras.layers.Dropout(0.5), tf.keras.layers.Dense(\n 512, activation='relu'), tf.keras.layers.Dense(1, activation=\n 'sigmoid')])\n self.last_training_history = {}\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 
255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n\n def plot_last_training_history(self, save_plot=False):\n for key in self.last_training_history:\n y = self.last_training_history[key]\n plt.plot([(i + 1) for i in range(len(y))], y, label=key)\n plt.legend()\n plt.grid()\n plt.xlabel('epoch')\n if save_plot:\n plt.savefig('training_history.png', dpi=300)\n else:\n plt.show()\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n 
self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n\n def load_last_training_history(self, filepath='result.pk'):\n with open(filepath, 'rb') as f:\n self.last_training_history = pickle.load(f)\n\n def get_last_training_history(self):\n return self.last_training_history\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, 
f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n\n def print_model_info(self):\n print(self.model.summary())\n\n def get_model(self):\n return self.model\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n <function token>\n\n def get_last_training_history(self):\n return self.last_training_history\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 
150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n\n def print_model_info(self):\n print(self.model.summary())\n <function token>\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n <function token>\n\n def get_last_training_history(self):\n return self.last_training_history\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n\n def predict_directory(self, directory, probabilities=True):\n if directory[-1] != '\\\\' and directory[-1] != '/':\n directory += '/'\n predictions = {}\n onlyfiles = [f for f in os.listdir(directory) if os.path.isfile(os.\n path.join(directory, f))]\n for image_file in onlyfiles:\n img = tf.keras.preprocessing.image.load_img(directory +\n image_file, target_size=(150, 150), 
color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n y = self.model.predict(x)[0][0]\n if probabilities:\n predictions[image_file] = y\n else:\n predictions[image_file] = y > 0.5\n return predictions\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n\n def print_model_info(self):\n print(self.model.summary())\n <function token>\n\n def load_weights(self, filepath='model.h5'):\n self.model.load_weights(filepath)\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n <function token>\n\n def get_last_training_history(self):\n return self.last_training_history\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n <function token>\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 
255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n\n def print_model_info(self):\n print(self.model.summary())\n <function token>\n <function token>\n <function token>\n\n def get_last_training_history(self):\n return self.last_training_history\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n <function token>\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return 
json.dumps(True) if is_default_image else json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_last_training_history(self):\n return self.last_training_history\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n <function token>\n\n def predict_single_image(self, file_url):\n self.load_weights()\n self.load_last_training_history()\n file_name = 'image.jpg'\n urllib.request.urlretrieve(file_url, file_name)\n img = tf.keras.preprocessing.image.load_img(file_name, target_size=\n (150, 150), color_mode='grayscale')\n x = tf.keras.preprocessing.image.img_to_array(img)\n x = x.reshape((1,) + x.shape)\n x = x / 255\n prediction = self.model.predict(x)[0][0]\n is_default_image = prediction < 0.5\n print(prediction)\n os.remove(file_name)\n return json.dumps(True) if is_default_image else 
json.dumps(False)\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_last_training_history(self):\n return self.last_training_history\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def train(self, directory, epochs=100, save_model=False, save_history=False\n ):\n train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255, rotation_range=20, width_shift_range=0.15,\n height_shift_range=0.15, shear_range=0.15, zoom_range=0.15,\n fill_mode='nearest', horizontal_flip=True, vertical_flip=False,\n brightness_range=None, channel_shift_range=0)\n test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale\n =1.0 / 255)\n train_generator = train_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n test_generator = test_datagen.flow_from_directory(directory,\n target_size=(150, 150), batch_size=32, color_mode='grayscale',\n class_mode='binary')\n self.model.compile(optimizer='adam', loss='binary_crossentropy',\n metrics=['acc'])\n history = self.model.fit(train_generator, epochs=epochs,\n validation_data=test_generator)\n if save_model:\n self.model.save('model.h5')\n if save_history:\n with open('result.pk', 'wb') as f:\n pickle.dump(history.history, f)\n self.last_training_history = history.history\n return history.history\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n\n\nclass CNN(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<class token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,754 |
76f2312a01bf8475220a9fcc16209faddfccd2ae
|
import os
import sys
import logging.config
import sqlalchemy as sql
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Float, String, Text, Integer
import pandas as pd
import numpy as np
sys.path.append('./config')
import config
logging.basicConfig(level=logging.INFO, format='%(name)s - %(levelname)s - %(asctime)s - %(message)s')
logger = logging.getLogger(__file__)
Base = declarative_base()
class BeanAttributes(Base):
""" Defines the data model for the table `bean_attributes`. """
__tablename__ = 'bean_attributes'
id = Column(Integer, primary_key=True)
species = Column(String(100), unique=False, nullable=True)
owner = Column(String(100), unique=False, nullable=True)
country = Column(String(100), unique=False, nullable=True)
farm_name = Column(String(100), unique=False, nullable=True)
company = Column(String(100), unique=False, nullable=True)
region = Column(String(100), unique=False, nullable=True)
producer = Column(String(100), unique=False, nullable=True)
grading_date = Column(String(100), unique=False, nullable=True)
processing_method = Column(Text, unique=False, nullable=True)
aroma = Column(Float, unique=False, nullable=True)
flavor = Column(Float, unique=False, nullable=True)
aftertaste = Column(Float, unique=False, nullable=True)
acidity = Column(Float, unique=False, nullable=True)
body = Column(Float, unique=False, nullable=True)
balance = Column(Float, unique=False, nullable=True)
uniformity = Column(Float, unique=False, nullable=True)
cleancup = Column(Float, unique=False, nullable=True)
sweetness = Column(Float, unique=False, nullable=True)
total_cup_point = Column(Float, unique=False, nullable=True)
moisture = Column(Float, unique=False, nullable=True)
color = Column(String(100), unique=False, nullable=True)
cluster = Column(Integer, unique=False, nullable=True)
def __repr__(self):
return '<BeanAttributes %r>' % self.id
def persist_to_db(engine_string):
"""Persist the data to database.
Args:
engine_string (`str`): Engine string for SQLAlchemy.
Returns:
None.
"""
engine = sql.create_engine(engine_string)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
# Delete all existing records in the table
if config.LOCAL_DB_FLAG:
try:
session.execute('''DELETE FROM msia_db.bean_attributes''')
except:
pass
else:
try:
session.execute('''DELETE FROM bean_attributes''')
except:
pass
# Read the data table and persist it into the database
raw_data = pd.read_csv(config.DATA_TABLE_PATH)
raw_data = raw_data.replace(np.nan, '', regex=True)
try:
for i in range(raw_data.shape[0]):
bean_row = BeanAttributes(id=int(raw_data.iloc[i]['Unnamed: 0']),
species=str(raw_data.iloc[i]['Species']),
owner=str(raw_data.iloc[i]['Owner.1']),
country=str(raw_data.iloc[i]['Country.of.Origin']),
farm_name=str(raw_data.iloc[i]['Farm.Name']),
company=str(raw_data.iloc[i]['Company']),
region=str(raw_data.iloc[i]['Region']),
producer=str(raw_data.iloc[i]['Producer']),
grading_date=str(raw_data.iloc[i]['Grading.Date']),
processing_method=str(raw_data.iloc[i]['Processing.Method']),
aroma=float(raw_data.iloc[i]['Aroma']),
flavor=float(raw_data.iloc[i]['Flavor']),
aftertaste=float(raw_data.iloc[i]['Aftertaste']),
acidity=float(raw_data.iloc[i]['Acidity']),
body=float(raw_data.iloc[i]['Body']),
balance=float(raw_data.iloc[i]['Balance']),
uniformity=float(raw_data.iloc[i]['Uniformity']),
cleancup=float(raw_data.iloc[i]['Clean.Cup']),
sweetness=float(raw_data.iloc[i]['Sweetness']),
total_cup_point=float(raw_data.iloc[i]['Total.Cup.Points']),
moisture=float(raw_data.iloc[i]['Moisture']),
color=str(raw_data.iloc[i]['Color']),
cluster=int(raw_data.iloc[i]['cluster'])
)
session.add(bean_row)
logger.debug('Row %d added to table ' % i)
session.commit()
except sql.exc.IntegrityError: # Check primary key duplication
logger.error("Duplicated coffee bean")
except Exception as e:
logger.error("Incorrect credentials, access denied", e)
finally:
session.close()
if __name__ == "__main__":
# Obtain parameters from os
conn_type = "mysql+pymysql"
user = os.environ.get("MYSQL_USER")
password = os.environ.get("MYSQL_PASSWORD")
host = os.environ.get("MYSQL_HOST")
port = os.environ.get("MYSQL_PORT")
database = os.environ.get("DATABASE_NAME")
local_database_path = config.LOCAL_DATABASE_PATH
# If users wish to write to their own SQLALCHEMY_DATABASE_URI in the environment
if config.SQLALCHEMY_DATABASE_URI is None:
# Whether to create a local SQLite database or an AWS RDS database
if config.LOCAL_DB_FLAG:
engine_string = "sqlite:///{}".format(local_database_path)
else:
engine_string = "{}://{}:{}@{}:{}/{}".format(conn_type, user, password, host, port, database)
else:
engine_string = config.SQLALCHEMY_DATABASE_URI
try:
engine_string = 'sqlite:///data/bean.db'
persist_to_db(engine_string)
logger.info("Data successfully persisted into the database")
except Exception as e:
logger.error(e)
sys.exit(1)
|
[
"import os\nimport sys\nimport logging.config\nimport sqlalchemy as sql\nfrom sqlalchemy.orm import sessionmaker\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Float, String, Text, Integer\nimport pandas as pd\nimport numpy as np\nsys.path.append('./config')\nimport config\n\nlogging.basicConfig(level=logging.INFO, format='%(name)s - %(levelname)s - %(asctime)s - %(message)s')\nlogger = logging.getLogger(__file__)\n\nBase = declarative_base()\n\nclass BeanAttributes(Base):\n \"\"\" Defines the data model for the table `bean_attributes`. \"\"\"\n\n __tablename__ = 'bean_attributes'\n\n id = Column(Integer, primary_key=True)\n species = Column(String(100), unique=False, nullable=True)\n owner = Column(String(100), unique=False, nullable=True)\n country = Column(String(100), unique=False, nullable=True)\n farm_name = Column(String(100), unique=False, nullable=True)\n company = Column(String(100), unique=False, nullable=True)\n region = Column(String(100), unique=False, nullable=True)\n producer = Column(String(100), unique=False, nullable=True)\n grading_date = Column(String(100), unique=False, nullable=True)\n processing_method = Column(Text, unique=False, nullable=True)\n aroma = Column(Float, unique=False, nullable=True)\n flavor = Column(Float, unique=False, nullable=True)\n aftertaste = Column(Float, unique=False, nullable=True)\n acidity = Column(Float, unique=False, nullable=True)\n body = Column(Float, unique=False, nullable=True)\n balance = Column(Float, unique=False, nullable=True)\n uniformity = Column(Float, unique=False, nullable=True)\n cleancup = Column(Float, unique=False, nullable=True)\n sweetness = Column(Float, unique=False, nullable=True)\n total_cup_point = Column(Float, unique=False, nullable=True)\n moisture = Column(Float, unique=False, nullable=True)\n color = Column(String(100), unique=False, nullable=True)\n cluster = Column(Integer, unique=False, nullable=True)\n\n def __repr__(self):\n return 
'<BeanAttributes %r>' % self.id\n\n\ndef persist_to_db(engine_string):\n \"\"\"Persist the data to database.\n Args:\n engine_string (`str`): Engine string for SQLAlchemy.\n Returns:\n None.\n \"\"\"\n\n engine = sql.create_engine(engine_string)\n Base.metadata.create_all(engine)\n Session = sessionmaker(bind=engine)\n session = Session()\n\n # Delete all existing records in the table\n if config.LOCAL_DB_FLAG:\n try:\n session.execute('''DELETE FROM msia_db.bean_attributes''')\n except:\n pass\n else:\n try:\n session.execute('''DELETE FROM bean_attributes''')\n except:\n pass\n\n # Read the data table and persist it into the database\n raw_data = pd.read_csv(config.DATA_TABLE_PATH)\n raw_data = raw_data.replace(np.nan, '', regex=True)\n\n try:\n for i in range(raw_data.shape[0]):\n bean_row = BeanAttributes(id=int(raw_data.iloc[i]['Unnamed: 0']),\n species=str(raw_data.iloc[i]['Species']),\n owner=str(raw_data.iloc[i]['Owner.1']),\n country=str(raw_data.iloc[i]['Country.of.Origin']),\n farm_name=str(raw_data.iloc[i]['Farm.Name']),\n company=str(raw_data.iloc[i]['Company']),\n region=str(raw_data.iloc[i]['Region']),\n producer=str(raw_data.iloc[i]['Producer']),\n grading_date=str(raw_data.iloc[i]['Grading.Date']),\n processing_method=str(raw_data.iloc[i]['Processing.Method']),\n aroma=float(raw_data.iloc[i]['Aroma']),\n flavor=float(raw_data.iloc[i]['Flavor']),\n aftertaste=float(raw_data.iloc[i]['Aftertaste']),\n acidity=float(raw_data.iloc[i]['Acidity']),\n body=float(raw_data.iloc[i]['Body']),\n balance=float(raw_data.iloc[i]['Balance']),\n uniformity=float(raw_data.iloc[i]['Uniformity']),\n cleancup=float(raw_data.iloc[i]['Clean.Cup']),\n sweetness=float(raw_data.iloc[i]['Sweetness']),\n total_cup_point=float(raw_data.iloc[i]['Total.Cup.Points']),\n moisture=float(raw_data.iloc[i]['Moisture']),\n color=str(raw_data.iloc[i]['Color']),\n cluster=int(raw_data.iloc[i]['cluster'])\n )\n session.add(bean_row)\n logger.debug('Row %d added to table ' % i)\n 
session.commit()\n except sql.exc.IntegrityError: # Check primary key duplication\n logger.error(\"Duplicated coffee bean\")\n except Exception as e:\n logger.error(\"Incorrect credentials, access denied\", e)\n finally:\n session.close()\n\n\nif __name__ == \"__main__\":\n\n # Obtain parameters from os\n conn_type = \"mysql+pymysql\"\n user = os.environ.get(\"MYSQL_USER\")\n password = os.environ.get(\"MYSQL_PASSWORD\")\n host = os.environ.get(\"MYSQL_HOST\")\n port = os.environ.get(\"MYSQL_PORT\")\n database = os.environ.get(\"DATABASE_NAME\")\n local_database_path = config.LOCAL_DATABASE_PATH\n\n # If users wish to write to their own SQLALCHEMY_DATABASE_URI in the environment\n if config.SQLALCHEMY_DATABASE_URI is None:\n # Whether to create a local SQLite database or an AWS RDS database\n if config.LOCAL_DB_FLAG:\n engine_string = \"sqlite:///{}\".format(local_database_path)\n else:\n engine_string = \"{}://{}:{}@{}:{}/{}\".format(conn_type, user, password, host, port, database)\n else:\n engine_string = config.SQLALCHEMY_DATABASE_URI\n\n try:\n engine_string = 'sqlite:///data/bean.db'\n persist_to_db(engine_string)\n logger.info(\"Data successfully persisted into the database\")\n except Exception as e:\n logger.error(e)\n sys.exit(1)\n\n\n",
"import os\nimport sys\nimport logging.config\nimport sqlalchemy as sql\nfrom sqlalchemy.orm import sessionmaker\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Float, String, Text, Integer\nimport pandas as pd\nimport numpy as np\nsys.path.append('./config')\nimport config\nlogging.basicConfig(level=logging.INFO, format=\n '%(name)s - %(levelname)s - %(asctime)s - %(message)s')\nlogger = logging.getLogger(__file__)\nBase = declarative_base()\n\n\nclass BeanAttributes(Base):\n \"\"\" Defines the data model for the table `bean_attributes`. \"\"\"\n __tablename__ = 'bean_attributes'\n id = Column(Integer, primary_key=True)\n species = Column(String(100), unique=False, nullable=True)\n owner = Column(String(100), unique=False, nullable=True)\n country = Column(String(100), unique=False, nullable=True)\n farm_name = Column(String(100), unique=False, nullable=True)\n company = Column(String(100), unique=False, nullable=True)\n region = Column(String(100), unique=False, nullable=True)\n producer = Column(String(100), unique=False, nullable=True)\n grading_date = Column(String(100), unique=False, nullable=True)\n processing_method = Column(Text, unique=False, nullable=True)\n aroma = Column(Float, unique=False, nullable=True)\n flavor = Column(Float, unique=False, nullable=True)\n aftertaste = Column(Float, unique=False, nullable=True)\n acidity = Column(Float, unique=False, nullable=True)\n body = Column(Float, unique=False, nullable=True)\n balance = Column(Float, unique=False, nullable=True)\n uniformity = Column(Float, unique=False, nullable=True)\n cleancup = Column(Float, unique=False, nullable=True)\n sweetness = Column(Float, unique=False, nullable=True)\n total_cup_point = Column(Float, unique=False, nullable=True)\n moisture = Column(Float, unique=False, nullable=True)\n color = Column(String(100), unique=False, nullable=True)\n cluster = Column(Integer, unique=False, nullable=True)\n\n def __repr__(self):\n return 
'<BeanAttributes %r>' % self.id\n\n\ndef persist_to_db(engine_string):\n \"\"\"Persist the data to database.\n Args:\n engine_string (`str`): Engine string for SQLAlchemy.\n Returns:\n None.\n \"\"\"\n engine = sql.create_engine(engine_string)\n Base.metadata.create_all(engine)\n Session = sessionmaker(bind=engine)\n session = Session()\n if config.LOCAL_DB_FLAG:\n try:\n session.execute('DELETE FROM msia_db.bean_attributes')\n except:\n pass\n else:\n try:\n session.execute('DELETE FROM bean_attributes')\n except:\n pass\n raw_data = pd.read_csv(config.DATA_TABLE_PATH)\n raw_data = raw_data.replace(np.nan, '', regex=True)\n try:\n for i in range(raw_data.shape[0]):\n bean_row = BeanAttributes(id=int(raw_data.iloc[i]['Unnamed: 0']\n ), species=str(raw_data.iloc[i]['Species']), owner=str(\n raw_data.iloc[i]['Owner.1']), country=str(raw_data.iloc[i][\n 'Country.of.Origin']), farm_name=str(raw_data.iloc[i][\n 'Farm.Name']), company=str(raw_data.iloc[i]['Company']),\n region=str(raw_data.iloc[i]['Region']), producer=str(\n raw_data.iloc[i]['Producer']), grading_date=str(raw_data.\n iloc[i]['Grading.Date']), processing_method=str(raw_data.\n iloc[i]['Processing.Method']), aroma=float(raw_data.iloc[i]\n ['Aroma']), flavor=float(raw_data.iloc[i]['Flavor']),\n aftertaste=float(raw_data.iloc[i]['Aftertaste']), acidity=\n float(raw_data.iloc[i]['Acidity']), body=float(raw_data.\n iloc[i]['Body']), balance=float(raw_data.iloc[i]['Balance']\n ), uniformity=float(raw_data.iloc[i]['Uniformity']),\n cleancup=float(raw_data.iloc[i]['Clean.Cup']), sweetness=\n float(raw_data.iloc[i]['Sweetness']), total_cup_point=float\n (raw_data.iloc[i]['Total.Cup.Points']), moisture=float(\n raw_data.iloc[i]['Moisture']), color=str(raw_data.iloc[i][\n 'Color']), cluster=int(raw_data.iloc[i]['cluster']))\n session.add(bean_row)\n logger.debug('Row %d added to table ' % i)\n session.commit()\n except sql.exc.IntegrityError:\n logger.error('Duplicated coffee bean')\n except Exception as e:\n 
logger.error('Incorrect credentials, access denied', e)\n finally:\n session.close()\n\n\nif __name__ == '__main__':\n conn_type = 'mysql+pymysql'\n user = os.environ.get('MYSQL_USER')\n password = os.environ.get('MYSQL_PASSWORD')\n host = os.environ.get('MYSQL_HOST')\n port = os.environ.get('MYSQL_PORT')\n database = os.environ.get('DATABASE_NAME')\n local_database_path = config.LOCAL_DATABASE_PATH\n if config.SQLALCHEMY_DATABASE_URI is None:\n if config.LOCAL_DB_FLAG:\n engine_string = 'sqlite:///{}'.format(local_database_path)\n else:\n engine_string = '{}://{}:{}@{}:{}/{}'.format(conn_type, user,\n password, host, port, database)\n else:\n engine_string = config.SQLALCHEMY_DATABASE_URI\n try:\n engine_string = 'sqlite:///data/bean.db'\n persist_to_db(engine_string)\n logger.info('Data successfully persisted into the database')\n except Exception as e:\n logger.error(e)\n sys.exit(1)\n",
"<import token>\nsys.path.append('./config')\n<import token>\nlogging.basicConfig(level=logging.INFO, format=\n '%(name)s - %(levelname)s - %(asctime)s - %(message)s')\nlogger = logging.getLogger(__file__)\nBase = declarative_base()\n\n\nclass BeanAttributes(Base):\n \"\"\" Defines the data model for the table `bean_attributes`. \"\"\"\n __tablename__ = 'bean_attributes'\n id = Column(Integer, primary_key=True)\n species = Column(String(100), unique=False, nullable=True)\n owner = Column(String(100), unique=False, nullable=True)\n country = Column(String(100), unique=False, nullable=True)\n farm_name = Column(String(100), unique=False, nullable=True)\n company = Column(String(100), unique=False, nullable=True)\n region = Column(String(100), unique=False, nullable=True)\n producer = Column(String(100), unique=False, nullable=True)\n grading_date = Column(String(100), unique=False, nullable=True)\n processing_method = Column(Text, unique=False, nullable=True)\n aroma = Column(Float, unique=False, nullable=True)\n flavor = Column(Float, unique=False, nullable=True)\n aftertaste = Column(Float, unique=False, nullable=True)\n acidity = Column(Float, unique=False, nullable=True)\n body = Column(Float, unique=False, nullable=True)\n balance = Column(Float, unique=False, nullable=True)\n uniformity = Column(Float, unique=False, nullable=True)\n cleancup = Column(Float, unique=False, nullable=True)\n sweetness = Column(Float, unique=False, nullable=True)\n total_cup_point = Column(Float, unique=False, nullable=True)\n moisture = Column(Float, unique=False, nullable=True)\n color = Column(String(100), unique=False, nullable=True)\n cluster = Column(Integer, unique=False, nullable=True)\n\n def __repr__(self):\n return '<BeanAttributes %r>' % self.id\n\n\ndef persist_to_db(engine_string):\n \"\"\"Persist the data to database.\n Args:\n engine_string (`str`): Engine string for SQLAlchemy.\n Returns:\n None.\n \"\"\"\n engine = sql.create_engine(engine_string)\n 
Base.metadata.create_all(engine)\n Session = sessionmaker(bind=engine)\n session = Session()\n if config.LOCAL_DB_FLAG:\n try:\n session.execute('DELETE FROM msia_db.bean_attributes')\n except:\n pass\n else:\n try:\n session.execute('DELETE FROM bean_attributes')\n except:\n pass\n raw_data = pd.read_csv(config.DATA_TABLE_PATH)\n raw_data = raw_data.replace(np.nan, '', regex=True)\n try:\n for i in range(raw_data.shape[0]):\n bean_row = BeanAttributes(id=int(raw_data.iloc[i]['Unnamed: 0']\n ), species=str(raw_data.iloc[i]['Species']), owner=str(\n raw_data.iloc[i]['Owner.1']), country=str(raw_data.iloc[i][\n 'Country.of.Origin']), farm_name=str(raw_data.iloc[i][\n 'Farm.Name']), company=str(raw_data.iloc[i]['Company']),\n region=str(raw_data.iloc[i]['Region']), producer=str(\n raw_data.iloc[i]['Producer']), grading_date=str(raw_data.\n iloc[i]['Grading.Date']), processing_method=str(raw_data.\n iloc[i]['Processing.Method']), aroma=float(raw_data.iloc[i]\n ['Aroma']), flavor=float(raw_data.iloc[i]['Flavor']),\n aftertaste=float(raw_data.iloc[i]['Aftertaste']), acidity=\n float(raw_data.iloc[i]['Acidity']), body=float(raw_data.\n iloc[i]['Body']), balance=float(raw_data.iloc[i]['Balance']\n ), uniformity=float(raw_data.iloc[i]['Uniformity']),\n cleancup=float(raw_data.iloc[i]['Clean.Cup']), sweetness=\n float(raw_data.iloc[i]['Sweetness']), total_cup_point=float\n (raw_data.iloc[i]['Total.Cup.Points']), moisture=float(\n raw_data.iloc[i]['Moisture']), color=str(raw_data.iloc[i][\n 'Color']), cluster=int(raw_data.iloc[i]['cluster']))\n session.add(bean_row)\n logger.debug('Row %d added to table ' % i)\n session.commit()\n except sql.exc.IntegrityError:\n logger.error('Duplicated coffee bean')\n except Exception as e:\n logger.error('Incorrect credentials, access denied', e)\n finally:\n session.close()\n\n\nif __name__ == '__main__':\n conn_type = 'mysql+pymysql'\n user = os.environ.get('MYSQL_USER')\n password = os.environ.get('MYSQL_PASSWORD')\n host = 
os.environ.get('MYSQL_HOST')\n port = os.environ.get('MYSQL_PORT')\n database = os.environ.get('DATABASE_NAME')\n local_database_path = config.LOCAL_DATABASE_PATH\n if config.SQLALCHEMY_DATABASE_URI is None:\n if config.LOCAL_DB_FLAG:\n engine_string = 'sqlite:///{}'.format(local_database_path)\n else:\n engine_string = '{}://{}:{}@{}:{}/{}'.format(conn_type, user,\n password, host, port, database)\n else:\n engine_string = config.SQLALCHEMY_DATABASE_URI\n try:\n engine_string = 'sqlite:///data/bean.db'\n persist_to_db(engine_string)\n logger.info('Data successfully persisted into the database')\n except Exception as e:\n logger.error(e)\n sys.exit(1)\n",
"<import token>\nsys.path.append('./config')\n<import token>\nlogging.basicConfig(level=logging.INFO, format=\n '%(name)s - %(levelname)s - %(asctime)s - %(message)s')\n<assignment token>\n\n\nclass BeanAttributes(Base):\n \"\"\" Defines the data model for the table `bean_attributes`. \"\"\"\n __tablename__ = 'bean_attributes'\n id = Column(Integer, primary_key=True)\n species = Column(String(100), unique=False, nullable=True)\n owner = Column(String(100), unique=False, nullable=True)\n country = Column(String(100), unique=False, nullable=True)\n farm_name = Column(String(100), unique=False, nullable=True)\n company = Column(String(100), unique=False, nullable=True)\n region = Column(String(100), unique=False, nullable=True)\n producer = Column(String(100), unique=False, nullable=True)\n grading_date = Column(String(100), unique=False, nullable=True)\n processing_method = Column(Text, unique=False, nullable=True)\n aroma = Column(Float, unique=False, nullable=True)\n flavor = Column(Float, unique=False, nullable=True)\n aftertaste = Column(Float, unique=False, nullable=True)\n acidity = Column(Float, unique=False, nullable=True)\n body = Column(Float, unique=False, nullable=True)\n balance = Column(Float, unique=False, nullable=True)\n uniformity = Column(Float, unique=False, nullable=True)\n cleancup = Column(Float, unique=False, nullable=True)\n sweetness = Column(Float, unique=False, nullable=True)\n total_cup_point = Column(Float, unique=False, nullable=True)\n moisture = Column(Float, unique=False, nullable=True)\n color = Column(String(100), unique=False, nullable=True)\n cluster = Column(Integer, unique=False, nullable=True)\n\n def __repr__(self):\n return '<BeanAttributes %r>' % self.id\n\n\ndef persist_to_db(engine_string):\n \"\"\"Persist the data to database.\n Args:\n engine_string (`str`): Engine string for SQLAlchemy.\n Returns:\n None.\n \"\"\"\n engine = sql.create_engine(engine_string)\n Base.metadata.create_all(engine)\n Session = 
sessionmaker(bind=engine)\n session = Session()\n if config.LOCAL_DB_FLAG:\n try:\n session.execute('DELETE FROM msia_db.bean_attributes')\n except:\n pass\n else:\n try:\n session.execute('DELETE FROM bean_attributes')\n except:\n pass\n raw_data = pd.read_csv(config.DATA_TABLE_PATH)\n raw_data = raw_data.replace(np.nan, '', regex=True)\n try:\n for i in range(raw_data.shape[0]):\n bean_row = BeanAttributes(id=int(raw_data.iloc[i]['Unnamed: 0']\n ), species=str(raw_data.iloc[i]['Species']), owner=str(\n raw_data.iloc[i]['Owner.1']), country=str(raw_data.iloc[i][\n 'Country.of.Origin']), farm_name=str(raw_data.iloc[i][\n 'Farm.Name']), company=str(raw_data.iloc[i]['Company']),\n region=str(raw_data.iloc[i]['Region']), producer=str(\n raw_data.iloc[i]['Producer']), grading_date=str(raw_data.\n iloc[i]['Grading.Date']), processing_method=str(raw_data.\n iloc[i]['Processing.Method']), aroma=float(raw_data.iloc[i]\n ['Aroma']), flavor=float(raw_data.iloc[i]['Flavor']),\n aftertaste=float(raw_data.iloc[i]['Aftertaste']), acidity=\n float(raw_data.iloc[i]['Acidity']), body=float(raw_data.\n iloc[i]['Body']), balance=float(raw_data.iloc[i]['Balance']\n ), uniformity=float(raw_data.iloc[i]['Uniformity']),\n cleancup=float(raw_data.iloc[i]['Clean.Cup']), sweetness=\n float(raw_data.iloc[i]['Sweetness']), total_cup_point=float\n (raw_data.iloc[i]['Total.Cup.Points']), moisture=float(\n raw_data.iloc[i]['Moisture']), color=str(raw_data.iloc[i][\n 'Color']), cluster=int(raw_data.iloc[i]['cluster']))\n session.add(bean_row)\n logger.debug('Row %d added to table ' % i)\n session.commit()\n except sql.exc.IntegrityError:\n logger.error('Duplicated coffee bean')\n except Exception as e:\n logger.error('Incorrect credentials, access denied', e)\n finally:\n session.close()\n\n\nif __name__ == '__main__':\n conn_type = 'mysql+pymysql'\n user = os.environ.get('MYSQL_USER')\n password = os.environ.get('MYSQL_PASSWORD')\n host = os.environ.get('MYSQL_HOST')\n port = 
os.environ.get('MYSQL_PORT')\n database = os.environ.get('DATABASE_NAME')\n local_database_path = config.LOCAL_DATABASE_PATH\n if config.SQLALCHEMY_DATABASE_URI is None:\n if config.LOCAL_DB_FLAG:\n engine_string = 'sqlite:///{}'.format(local_database_path)\n else:\n engine_string = '{}://{}:{}@{}:{}/{}'.format(conn_type, user,\n password, host, port, database)\n else:\n engine_string = config.SQLALCHEMY_DATABASE_URI\n try:\n engine_string = 'sqlite:///data/bean.db'\n persist_to_db(engine_string)\n logger.info('Data successfully persisted into the database')\n except Exception as e:\n logger.error(e)\n sys.exit(1)\n",
"<import token>\n<code token>\n<import token>\n<code token>\n<assignment token>\n\n\nclass BeanAttributes(Base):\n \"\"\" Defines the data model for the table `bean_attributes`. \"\"\"\n __tablename__ = 'bean_attributes'\n id = Column(Integer, primary_key=True)\n species = Column(String(100), unique=False, nullable=True)\n owner = Column(String(100), unique=False, nullable=True)\n country = Column(String(100), unique=False, nullable=True)\n farm_name = Column(String(100), unique=False, nullable=True)\n company = Column(String(100), unique=False, nullable=True)\n region = Column(String(100), unique=False, nullable=True)\n producer = Column(String(100), unique=False, nullable=True)\n grading_date = Column(String(100), unique=False, nullable=True)\n processing_method = Column(Text, unique=False, nullable=True)\n aroma = Column(Float, unique=False, nullable=True)\n flavor = Column(Float, unique=False, nullable=True)\n aftertaste = Column(Float, unique=False, nullable=True)\n acidity = Column(Float, unique=False, nullable=True)\n body = Column(Float, unique=False, nullable=True)\n balance = Column(Float, unique=False, nullable=True)\n uniformity = Column(Float, unique=False, nullable=True)\n cleancup = Column(Float, unique=False, nullable=True)\n sweetness = Column(Float, unique=False, nullable=True)\n total_cup_point = Column(Float, unique=False, nullable=True)\n moisture = Column(Float, unique=False, nullable=True)\n color = Column(String(100), unique=False, nullable=True)\n cluster = Column(Integer, unique=False, nullable=True)\n\n def __repr__(self):\n return '<BeanAttributes %r>' % self.id\n\n\ndef persist_to_db(engine_string):\n \"\"\"Persist the data to database.\n Args:\n engine_string (`str`): Engine string for SQLAlchemy.\n Returns:\n None.\n \"\"\"\n engine = sql.create_engine(engine_string)\n Base.metadata.create_all(engine)\n Session = sessionmaker(bind=engine)\n session = Session()\n if config.LOCAL_DB_FLAG:\n try:\n session.execute('DELETE FROM 
msia_db.bean_attributes')\n except:\n pass\n else:\n try:\n session.execute('DELETE FROM bean_attributes')\n except:\n pass\n raw_data = pd.read_csv(config.DATA_TABLE_PATH)\n raw_data = raw_data.replace(np.nan, '', regex=True)\n try:\n for i in range(raw_data.shape[0]):\n bean_row = BeanAttributes(id=int(raw_data.iloc[i]['Unnamed: 0']\n ), species=str(raw_data.iloc[i]['Species']), owner=str(\n raw_data.iloc[i]['Owner.1']), country=str(raw_data.iloc[i][\n 'Country.of.Origin']), farm_name=str(raw_data.iloc[i][\n 'Farm.Name']), company=str(raw_data.iloc[i]['Company']),\n region=str(raw_data.iloc[i]['Region']), producer=str(\n raw_data.iloc[i]['Producer']), grading_date=str(raw_data.\n iloc[i]['Grading.Date']), processing_method=str(raw_data.\n iloc[i]['Processing.Method']), aroma=float(raw_data.iloc[i]\n ['Aroma']), flavor=float(raw_data.iloc[i]['Flavor']),\n aftertaste=float(raw_data.iloc[i]['Aftertaste']), acidity=\n float(raw_data.iloc[i]['Acidity']), body=float(raw_data.\n iloc[i]['Body']), balance=float(raw_data.iloc[i]['Balance']\n ), uniformity=float(raw_data.iloc[i]['Uniformity']),\n cleancup=float(raw_data.iloc[i]['Clean.Cup']), sweetness=\n float(raw_data.iloc[i]['Sweetness']), total_cup_point=float\n (raw_data.iloc[i]['Total.Cup.Points']), moisture=float(\n raw_data.iloc[i]['Moisture']), color=str(raw_data.iloc[i][\n 'Color']), cluster=int(raw_data.iloc[i]['cluster']))\n session.add(bean_row)\n logger.debug('Row %d added to table ' % i)\n session.commit()\n except sql.exc.IntegrityError:\n logger.error('Duplicated coffee bean')\n except Exception as e:\n logger.error('Incorrect credentials, access denied', e)\n finally:\n session.close()\n\n\n<code token>\n",
"<import token>\n<code token>\n<import token>\n<code token>\n<assignment token>\n\n\nclass BeanAttributes(Base):\n \"\"\" Defines the data model for the table `bean_attributes`. \"\"\"\n __tablename__ = 'bean_attributes'\n id = Column(Integer, primary_key=True)\n species = Column(String(100), unique=False, nullable=True)\n owner = Column(String(100), unique=False, nullable=True)\n country = Column(String(100), unique=False, nullable=True)\n farm_name = Column(String(100), unique=False, nullable=True)\n company = Column(String(100), unique=False, nullable=True)\n region = Column(String(100), unique=False, nullable=True)\n producer = Column(String(100), unique=False, nullable=True)\n grading_date = Column(String(100), unique=False, nullable=True)\n processing_method = Column(Text, unique=False, nullable=True)\n aroma = Column(Float, unique=False, nullable=True)\n flavor = Column(Float, unique=False, nullable=True)\n aftertaste = Column(Float, unique=False, nullable=True)\n acidity = Column(Float, unique=False, nullable=True)\n body = Column(Float, unique=False, nullable=True)\n balance = Column(Float, unique=False, nullable=True)\n uniformity = Column(Float, unique=False, nullable=True)\n cleancup = Column(Float, unique=False, nullable=True)\n sweetness = Column(Float, unique=False, nullable=True)\n total_cup_point = Column(Float, unique=False, nullable=True)\n moisture = Column(Float, unique=False, nullable=True)\n color = Column(String(100), unique=False, nullable=True)\n cluster = Column(Integer, unique=False, nullable=True)\n\n def __repr__(self):\n return '<BeanAttributes %r>' % self.id\n\n\n<function token>\n<code token>\n",
"<import token>\n<code token>\n<import token>\n<code token>\n<assignment token>\n\n\nclass BeanAttributes(Base):\n <docstring token>\n __tablename__ = 'bean_attributes'\n id = Column(Integer, primary_key=True)\n species = Column(String(100), unique=False, nullable=True)\n owner = Column(String(100), unique=False, nullable=True)\n country = Column(String(100), unique=False, nullable=True)\n farm_name = Column(String(100), unique=False, nullable=True)\n company = Column(String(100), unique=False, nullable=True)\n region = Column(String(100), unique=False, nullable=True)\n producer = Column(String(100), unique=False, nullable=True)\n grading_date = Column(String(100), unique=False, nullable=True)\n processing_method = Column(Text, unique=False, nullable=True)\n aroma = Column(Float, unique=False, nullable=True)\n flavor = Column(Float, unique=False, nullable=True)\n aftertaste = Column(Float, unique=False, nullable=True)\n acidity = Column(Float, unique=False, nullable=True)\n body = Column(Float, unique=False, nullable=True)\n balance = Column(Float, unique=False, nullable=True)\n uniformity = Column(Float, unique=False, nullable=True)\n cleancup = Column(Float, unique=False, nullable=True)\n sweetness = Column(Float, unique=False, nullable=True)\n total_cup_point = Column(Float, unique=False, nullable=True)\n moisture = Column(Float, unique=False, nullable=True)\n color = Column(String(100), unique=False, nullable=True)\n cluster = Column(Integer, unique=False, nullable=True)\n\n def __repr__(self):\n return '<BeanAttributes %r>' % self.id\n\n\n<function token>\n<code token>\n",
"<import token>\n<code token>\n<import token>\n<code token>\n<assignment token>\n\n\nclass BeanAttributes(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __repr__(self):\n return '<BeanAttributes %r>' % self.id\n\n\n<function token>\n<code token>\n",
"<import token>\n<code token>\n<import token>\n<code token>\n<assignment token>\n\n\nclass BeanAttributes(Base):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\n<function token>\n<code token>\n",
"<import token>\n<code token>\n<import token>\n<code token>\n<assignment token>\n<class token>\n<function token>\n<code token>\n"
] | false |
9,755 |
388904b6b826a1c718b85f2951a3189bb5abea2a
|
# import adafruit_ads1x15 as adс
# from adafruit_ads1x15 import ads1x15 as adc
# from adafruit_ads1x15 import analog_in
import time
import busio
import board
from adafruit_ads1x15 import ads1015 as ADS
from adafruit_ads1x15.analog_in import AnalogIn
i2c = busio.I2C(board.SCL, board.SDA)
ads = ADS.ADS1015(i2c)
chan = AnalogIn(ads, ADS.P0)
print("{:>5}\t{:>5}".format('raw', 'v'))
while True:
print("{:>5}\t{:>5.3f}".format(chan.value, chan.voltage))
time.sleep(0.5)
# print(dir(analog_in.AnalogIn()))
# analog_in.AnalogIn()
# GAIN = 1
# a = adc
#
# print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*range(4)))
# print('-' * 37)
# # Main loop.
# while True:
# # Read all the ADC channel values in a list.
# values = [0]*4
# for i in range(4):
# # Read the specified ADC channel using the previously set gain value.
# values[i] = a.read_adc(i, gain=GAIN)
#
# print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*values))
# # Pause for half a second.
# time.sleep(0.5)
|
[
"# import adafruit_ads1x15 as adс\r\n# from adafruit_ads1x15 import ads1x15 as adc\r\n# from adafruit_ads1x15 import analog_in\r\nimport time\r\nimport busio\r\nimport board\r\nfrom adafruit_ads1x15 import ads1015 as ADS\r\nfrom adafruit_ads1x15.analog_in import AnalogIn\r\n\r\ni2c = busio.I2C(board.SCL, board.SDA)\r\nads = ADS.ADS1015(i2c)\r\nchan = AnalogIn(ads, ADS.P0)\r\n\r\nprint(\"{:>5}\\t{:>5}\".format('raw', 'v'))\r\n\r\nwhile True:\r\n print(\"{:>5}\\t{:>5.3f}\".format(chan.value, chan.voltage))\r\n time.sleep(0.5)\r\n\r\n# print(dir(analog_in.AnalogIn()))\r\n\r\n# analog_in.AnalogIn()\r\n\r\n\r\n# GAIN = 1\r\n# a = adc\r\n#\r\n# print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*range(4)))\r\n# print('-' * 37)\r\n# # Main loop.\r\n# while True:\r\n# # Read all the ADC channel values in a list.\r\n# values = [0]*4\r\n# for i in range(4):\r\n# # Read the specified ADC channel using the previously set gain value.\r\n# values[i] = a.read_adc(i, gain=GAIN)\r\n#\r\n# print('| {0:>6} | {1:>6} | {2:>6} | {3:>6} |'.format(*values))\r\n# # Pause for half a second.\r\n# time.sleep(0.5)\r\n",
"import time\nimport busio\nimport board\nfrom adafruit_ads1x15 import ads1015 as ADS\nfrom adafruit_ads1x15.analog_in import AnalogIn\ni2c = busio.I2C(board.SCL, board.SDA)\nads = ADS.ADS1015(i2c)\nchan = AnalogIn(ads, ADS.P0)\nprint('{:>5}\\t{:>5}'.format('raw', 'v'))\nwhile True:\n print('{:>5}\\t{:>5.3f}'.format(chan.value, chan.voltage))\n time.sleep(0.5)\n",
"<import token>\ni2c = busio.I2C(board.SCL, board.SDA)\nads = ADS.ADS1015(i2c)\nchan = AnalogIn(ads, ADS.P0)\nprint('{:>5}\\t{:>5}'.format('raw', 'v'))\nwhile True:\n print('{:>5}\\t{:>5.3f}'.format(chan.value, chan.voltage))\n time.sleep(0.5)\n",
"<import token>\n<assignment token>\nprint('{:>5}\\t{:>5}'.format('raw', 'v'))\nwhile True:\n print('{:>5}\\t{:>5.3f}'.format(chan.value, chan.voltage))\n time.sleep(0.5)\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
9,756 |
01128ebd156b24791548c50c92d2fc1969c42e70
|
import numpy as np
import sklearn.cluster as sc
import sklearn.metrics as sm
import matplotlib.pyplot as mp
x = np.loadtxt('C:\\Users\\Administrator\\Desktop\\sucai\\ml_data\\perf.txt', delimiter=',')
# 准备训练模型相关数据
epsilons, scores, models = np.linspace(0.3, 1.2, 10), [], []
# 遍历所有的半径,训练模型,查看得分
for epsilon in epsilons:
model = sc.DBSCAN(eps=epsilon, min_samples=5)
model.fit(x)
score = sm.silhouette_score(x, model.labels_, sample_size=len(x), metric='euclidean')
scores.append(score)
models.append(model)
# 转成ndarray数组
scores = np.array(scores)
best_i = scores.argmax() # 最优分数
best_eps = epsilons[best_i]
best_sco = scores[best_i]
# 获取最优模型
best_model = models[best_i]
# 对输入x进行预测得到预测类别
pred_y = best_model.fit_predict(x)
# 获取孤立样本,外周样本,核心样本
core_mask = np.zeros(len(x), dtype=bool)
# 获取核心样本的索引,把对应位置的元素改为True
core_mask[best_model.core_sample_indices_] = True
# 孤立样本的类别标签为-1
offset_mask = best_model.labels_ == -1
# 外周样本掩码(不是核心也不是孤立样本)
p_mask = ~(core_mask | offset_mask)
# 绘制这些样本数据
mp.figure('DBSCAN cluster', facecolor='lightgray')
mp.title('DBSCAN cluster', fontsize=16)
mp.xlabel('x', fontsize=14)
mp.ylabel('y', fontsize=14)
mp.tick_params(labelsize=10)
# 绘制核心样本
mp.scatter(x[core_mask][:, 0], x[core_mask][:, 1], s=60, cmap='brg', c=pred_y[core_mask])
# 绘制外周样本
mp.scatter(x[p_mask][:, 0], x[p_mask][:, 1], s=60, cmap='brg', c=pred_y[p_mask], alpha=0.5)
# 绘制孤立样本
mp.scatter(x[offset_mask][:, 0], x[offset_mask][:, 1], s=60, c='gray')
mp.show()
|
[
"import numpy as np \nimport sklearn.cluster as sc \nimport sklearn.metrics as sm \nimport matplotlib.pyplot as mp \n\nx = np.loadtxt('C:\\\\Users\\\\Administrator\\\\Desktop\\\\sucai\\\\ml_data\\\\perf.txt', delimiter=',')\n\n# 准备训练模型相关数据\nepsilons, scores, models = np.linspace(0.3, 1.2, 10), [], []\n\n# 遍历所有的半径,训练模型,查看得分\nfor epsilon in epsilons:\n model = sc.DBSCAN(eps=epsilon, min_samples=5)\n model.fit(x)\n score = sm.silhouette_score(x, model.labels_, sample_size=len(x), metric='euclidean')\n scores.append(score)\n models.append(model)\n\n# 转成ndarray数组\nscores = np.array(scores)\nbest_i = scores.argmax() # 最优分数\nbest_eps = epsilons[best_i]\nbest_sco = scores[best_i]\n\n# 获取最优模型\nbest_model = models[best_i]\n# 对输入x进行预测得到预测类别\npred_y = best_model.fit_predict(x)\n# 获取孤立样本,外周样本,核心样本\ncore_mask = np.zeros(len(x), dtype=bool)\n# 获取核心样本的索引,把对应位置的元素改为True\ncore_mask[best_model.core_sample_indices_] = True\n# 孤立样本的类别标签为-1\noffset_mask = best_model.labels_ == -1\n# 外周样本掩码(不是核心也不是孤立样本)\np_mask = ~(core_mask | offset_mask)\n# 绘制这些样本数据\nmp.figure('DBSCAN cluster', facecolor='lightgray')\nmp.title('DBSCAN cluster', fontsize=16)\nmp.xlabel('x', fontsize=14)\nmp.ylabel('y', fontsize=14)\nmp.tick_params(labelsize=10)\n# 绘制核心样本\nmp.scatter(x[core_mask][:, 0], x[core_mask][:, 1], s=60, cmap='brg', c=pred_y[core_mask])\n# 绘制外周样本\nmp.scatter(x[p_mask][:, 0], x[p_mask][:, 1], s=60, cmap='brg', c=pred_y[p_mask], alpha=0.5)\n# 绘制孤立样本\nmp.scatter(x[offset_mask][:, 0], x[offset_mask][:, 1], s=60, c='gray')\nmp.show()",
"import numpy as np\nimport sklearn.cluster as sc\nimport sklearn.metrics as sm\nimport matplotlib.pyplot as mp\nx = np.loadtxt('C:\\\\Users\\\\Administrator\\\\Desktop\\\\sucai\\\\ml_data\\\\perf.txt',\n delimiter=',')\nepsilons, scores, models = np.linspace(0.3, 1.2, 10), [], []\nfor epsilon in epsilons:\n model = sc.DBSCAN(eps=epsilon, min_samples=5)\n model.fit(x)\n score = sm.silhouette_score(x, model.labels_, sample_size=len(x),\n metric='euclidean')\n scores.append(score)\n models.append(model)\nscores = np.array(scores)\nbest_i = scores.argmax()\nbest_eps = epsilons[best_i]\nbest_sco = scores[best_i]\nbest_model = models[best_i]\npred_y = best_model.fit_predict(x)\ncore_mask = np.zeros(len(x), dtype=bool)\ncore_mask[best_model.core_sample_indices_] = True\noffset_mask = best_model.labels_ == -1\np_mask = ~(core_mask | offset_mask)\nmp.figure('DBSCAN cluster', facecolor='lightgray')\nmp.title('DBSCAN cluster', fontsize=16)\nmp.xlabel('x', fontsize=14)\nmp.ylabel('y', fontsize=14)\nmp.tick_params(labelsize=10)\nmp.scatter(x[core_mask][:, 0], x[core_mask][:, 1], s=60, cmap='brg', c=\n pred_y[core_mask])\nmp.scatter(x[p_mask][:, 0], x[p_mask][:, 1], s=60, cmap='brg', c=pred_y[\n p_mask], alpha=0.5)\nmp.scatter(x[offset_mask][:, 0], x[offset_mask][:, 1], s=60, c='gray')\nmp.show()\n",
"<import token>\nx = np.loadtxt('C:\\\\Users\\\\Administrator\\\\Desktop\\\\sucai\\\\ml_data\\\\perf.txt',\n delimiter=',')\nepsilons, scores, models = np.linspace(0.3, 1.2, 10), [], []\nfor epsilon in epsilons:\n model = sc.DBSCAN(eps=epsilon, min_samples=5)\n model.fit(x)\n score = sm.silhouette_score(x, model.labels_, sample_size=len(x),\n metric='euclidean')\n scores.append(score)\n models.append(model)\nscores = np.array(scores)\nbest_i = scores.argmax()\nbest_eps = epsilons[best_i]\nbest_sco = scores[best_i]\nbest_model = models[best_i]\npred_y = best_model.fit_predict(x)\ncore_mask = np.zeros(len(x), dtype=bool)\ncore_mask[best_model.core_sample_indices_] = True\noffset_mask = best_model.labels_ == -1\np_mask = ~(core_mask | offset_mask)\nmp.figure('DBSCAN cluster', facecolor='lightgray')\nmp.title('DBSCAN cluster', fontsize=16)\nmp.xlabel('x', fontsize=14)\nmp.ylabel('y', fontsize=14)\nmp.tick_params(labelsize=10)\nmp.scatter(x[core_mask][:, 0], x[core_mask][:, 1], s=60, cmap='brg', c=\n pred_y[core_mask])\nmp.scatter(x[p_mask][:, 0], x[p_mask][:, 1], s=60, cmap='brg', c=pred_y[\n p_mask], alpha=0.5)\nmp.scatter(x[offset_mask][:, 0], x[offset_mask][:, 1], s=60, c='gray')\nmp.show()\n",
"<import token>\n<assignment token>\nfor epsilon in epsilons:\n model = sc.DBSCAN(eps=epsilon, min_samples=5)\n model.fit(x)\n score = sm.silhouette_score(x, model.labels_, sample_size=len(x),\n metric='euclidean')\n scores.append(score)\n models.append(model)\n<assignment token>\nmp.figure('DBSCAN cluster', facecolor='lightgray')\nmp.title('DBSCAN cluster', fontsize=16)\nmp.xlabel('x', fontsize=14)\nmp.ylabel('y', fontsize=14)\nmp.tick_params(labelsize=10)\nmp.scatter(x[core_mask][:, 0], x[core_mask][:, 1], s=60, cmap='brg', c=\n pred_y[core_mask])\nmp.scatter(x[p_mask][:, 0], x[p_mask][:, 1], s=60, cmap='brg', c=pred_y[\n p_mask], alpha=0.5)\nmp.scatter(x[offset_mask][:, 0], x[offset_mask][:, 1], s=60, c='gray')\nmp.show()\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,757 |
9cad36de6231f310ef9022f16f6ed0da83a003b3
|
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 6 12:20:45 2017
@author: 7
"""
from os import listdir
from PIL import Image as PImage
from scipy import misc
import numpy as np
from Image_loader import LoadImages
"""
def LoadImages(path):
# return array of images
imagesList = listdir(path)
loadedImages = []
for image in imagesList:
img = misc.imread(path + image)
loadedImages.append(img)
return loadedImages
"""
def ModifyImages(path,path1):
# modify images to same scale
imagesList = listdir(path)
for image in imagesList:
old_img = PImage.open(path + image)
old_size = old_img.size
new_size = (540,420)
new_img = PImage.new("L", new_size)
new_img.paste(old_img,((new_size[0]-old_size[0])//2,(new_size[1]-old_size[1])//2))
new_img.save(path1 + image)
"""
path = "train\\"
path1 = "train_modified\\"
ModifyImages(path,path1)
imgs = LoadImages(path1)
a = np.array( imgs )
print (a.shape)
print("finished")
path = "test\\"
path1 = "test_modified\\"
ModifyImages(path,path1)
imgs = LoadImages(path1)
a = np.array( imgs )
print (a.shape)
print("finished")
path = "train_cleaned\\"
path1 = "train_cleaned_modified\\"
ModifyImages(path,path1)
imgs = LoadImages(path1)
a = np.array( imgs )
print (a.shape)
print("finished")
"""
|
[
"# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Mon Mar 6 12:20:45 2017\r\n\r\n@author: 7\r\n\"\"\"\r\n\r\nfrom os import listdir\r\nfrom PIL import Image as PImage\r\nfrom scipy import misc\r\nimport numpy as np\r\nfrom Image_loader import LoadImages\r\n\"\"\"\r\ndef LoadImages(path):\r\n # return array of images\r\n imagesList = listdir(path)\r\n loadedImages = []\r\n for image in imagesList:\r\n img = misc.imread(path + image)\r\n loadedImages.append(img)\r\n return loadedImages\r\n\"\"\"\r\n\r\n\r\ndef ModifyImages(path,path1):\r\n # modify images to same scale\r\n\r\n imagesList = listdir(path)\r\n for image in imagesList:\r\n old_img = PImage.open(path + image)\r\n old_size = old_img.size\r\n new_size = (540,420)\r\n new_img = PImage.new(\"L\", new_size) \r\n new_img.paste(old_img,((new_size[0]-old_size[0])//2,(new_size[1]-old_size[1])//2))\r\n new_img.save(path1 + image)\r\n\r\n\"\"\"\r\npath = \"train\\\\\"\r\npath1 = \"train_modified\\\\\"\r\nModifyImages(path,path1)\r\nimgs = LoadImages(path1)\r\na = np.array( imgs )\r\nprint (a.shape)\r\nprint(\"finished\")\r\n\r\n\r\npath = \"test\\\\\"\r\npath1 = \"test_modified\\\\\"\r\n\r\nModifyImages(path,path1)\r\nimgs = LoadImages(path1)\r\na = np.array( imgs )\r\nprint (a.shape)\r\nprint(\"finished\")\r\n\r\npath = \"train_cleaned\\\\\"\r\npath1 = \"train_cleaned_modified\\\\\"\r\n\r\nModifyImages(path,path1)\r\nimgs = LoadImages(path1)\r\na = np.array( imgs )\r\nprint (a.shape)\r\nprint(\"finished\")\r\n\"\"\"",
"<docstring token>\nfrom os import listdir\nfrom PIL import Image as PImage\nfrom scipy import misc\nimport numpy as np\nfrom Image_loader import LoadImages\n<docstring token>\n\n\ndef ModifyImages(path, path1):\n imagesList = listdir(path)\n for image in imagesList:\n old_img = PImage.open(path + image)\n old_size = old_img.size\n new_size = 540, 420\n new_img = PImage.new('L', new_size)\n new_img.paste(old_img, ((new_size[0] - old_size[0]) // 2, (new_size\n [1] - old_size[1]) // 2))\n new_img.save(path1 + image)\n\n\n<docstring token>\n",
"<docstring token>\n<import token>\n<docstring token>\n\n\ndef ModifyImages(path, path1):\n imagesList = listdir(path)\n for image in imagesList:\n old_img = PImage.open(path + image)\n old_size = old_img.size\n new_size = 540, 420\n new_img = PImage.new('L', new_size)\n new_img.paste(old_img, ((new_size[0] - old_size[0]) // 2, (new_size\n [1] - old_size[1]) // 2))\n new_img.save(path1 + image)\n\n\n<docstring token>\n",
"<docstring token>\n<import token>\n<docstring token>\n<function token>\n<docstring token>\n"
] | false |
9,758 |
0fb424dafaac184882ea56f36265e0b19b5a4c50
|
import torch
import torch.nn.functional as f
import time
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
import numpy as np
dtype = torch.float
device = torch.device("cpu")
# device = torch.device("cuda:0") # Uncomment this to run on GPU
N, D_in, H, D_out = 64, 1000, 100, 10
x = torch.randn(N, D_in, device=device, dtype=dtype)
y = torch.randn(N, D_out, device=device, dtype=dtype)
model = torch.nn.Sequential(
torch.nn.Linear(D_in, H),
torch.nn.ReLU(),
torch.nn.Linear(H, D_out),
)
def plot_grad_flow(named_parameters):
'''Plots the gradients flowing through different layers in the net during training.
Can be used for checking for possible gradient vanishing / exploding problems.
Usage: Plug this function in Trainer class after loss.backwards() as
"plot_grad_flow(self.model.named_parameters())" to visualize the gradient flow'''
ave_grads = []
max_grads = []
layers = []
for n, p in named_parameters:
if (p.requires_grad) and ("bias" not in n):
layers.append(n)
ave_grads.append(p.grad.abs().mean())
max_grads.append(p.grad.abs().max())
plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.1, lw=1, color="c")
plt.bar(np.arange(len(max_grads)), ave_grads, alpha=0.1, lw=1, color="b")
plt.hlines(0, 0, len(ave_grads) + 1, lw=2, color="k")
plt.xticks(range(0, len(ave_grads), 1), layers, rotation="vertical")
plt.xlim(left=0, right=len(ave_grads))
plt.ylim(bottom=-0.001, top=0.02) # zoom in on the lower gradient regions
plt.xlabel("Layers")
plt.ylabel("average gradient")
plt.title("Gradient flow")
plt.grid(True)
plt.legend([Line2D([0], [0], color="c", lw=4),
Line2D([0], [0], color="b", lw=4),
Line2D([0], [0], color="k", lw=4)], ['max-gradient', 'mean-gradient', 'zero-gradient'])
plt.show()
learning_rate = 1e-6
y_pred = model(x)
loss = (y_pred - y).pow(2).sum()
loss.backward()
plot_grad_flow(model.named_parameters())
|
[
"\nimport torch\nimport torch.nn.functional as f\nimport time\nimport matplotlib.pyplot as plt\nfrom matplotlib.lines import Line2D\nimport numpy as np\n\ndtype = torch.float\ndevice = torch.device(\"cpu\")\n# device = torch.device(\"cuda:0\") # Uncomment this to run on GPU\n\nN, D_in, H, D_out = 64, 1000, 100, 10\n\nx = torch.randn(N, D_in, device=device, dtype=dtype)\ny = torch.randn(N, D_out, device=device, dtype=dtype)\n\nmodel = torch.nn.Sequential(\n torch.nn.Linear(D_in, H),\n torch.nn.ReLU(),\n torch.nn.Linear(H, D_out),\n)\n\n\ndef plot_grad_flow(named_parameters):\n '''Plots the gradients flowing through different layers in the net during training.\n Can be used for checking for possible gradient vanishing / exploding problems.\n\n Usage: Plug this function in Trainer class after loss.backwards() as\n \"plot_grad_flow(self.model.named_parameters())\" to visualize the gradient flow'''\n ave_grads = []\n max_grads = []\n layers = []\n for n, p in named_parameters:\n if (p.requires_grad) and (\"bias\" not in n):\n layers.append(n)\n ave_grads.append(p.grad.abs().mean())\n max_grads.append(p.grad.abs().max())\n plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.1, lw=1, color=\"c\")\n plt.bar(np.arange(len(max_grads)), ave_grads, alpha=0.1, lw=1, color=\"b\")\n plt.hlines(0, 0, len(ave_grads) + 1, lw=2, color=\"k\")\n plt.xticks(range(0, len(ave_grads), 1), layers, rotation=\"vertical\")\n plt.xlim(left=0, right=len(ave_grads))\n plt.ylim(bottom=-0.001, top=0.02) # zoom in on the lower gradient regions\n plt.xlabel(\"Layers\")\n plt.ylabel(\"average gradient\")\n plt.title(\"Gradient flow\")\n plt.grid(True)\n plt.legend([Line2D([0], [0], color=\"c\", lw=4),\n Line2D([0], [0], color=\"b\", lw=4),\n Line2D([0], [0], color=\"k\", lw=4)], ['max-gradient', 'mean-gradient', 'zero-gradient'])\n\n plt.show()\n\nlearning_rate = 1e-6\ny_pred = model(x)\nloss = (y_pred - y).pow(2).sum()\nloss.backward()\nplot_grad_flow(model.named_parameters())\n",
"import torch\nimport torch.nn.functional as f\nimport time\nimport matplotlib.pyplot as plt\nfrom matplotlib.lines import Line2D\nimport numpy as np\ndtype = torch.float\ndevice = torch.device('cpu')\nN, D_in, H, D_out = 64, 1000, 100, 10\nx = torch.randn(N, D_in, device=device, dtype=dtype)\ny = torch.randn(N, D_out, device=device, dtype=dtype)\nmodel = torch.nn.Sequential(torch.nn.Linear(D_in, H), torch.nn.ReLU(),\n torch.nn.Linear(H, D_out))\n\n\ndef plot_grad_flow(named_parameters):\n \"\"\"Plots the gradients flowing through different layers in the net during training.\n Can be used for checking for possible gradient vanishing / exploding problems.\n\n Usage: Plug this function in Trainer class after loss.backwards() as\n \"plot_grad_flow(self.model.named_parameters())\" to visualize the gradient flow\"\"\"\n ave_grads = []\n max_grads = []\n layers = []\n for n, p in named_parameters:\n if p.requires_grad and 'bias' not in n:\n layers.append(n)\n ave_grads.append(p.grad.abs().mean())\n max_grads.append(p.grad.abs().max())\n plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.1, lw=1, color='c')\n plt.bar(np.arange(len(max_grads)), ave_grads, alpha=0.1, lw=1, color='b')\n plt.hlines(0, 0, len(ave_grads) + 1, lw=2, color='k')\n plt.xticks(range(0, len(ave_grads), 1), layers, rotation='vertical')\n plt.xlim(left=0, right=len(ave_grads))\n plt.ylim(bottom=-0.001, top=0.02)\n plt.xlabel('Layers')\n plt.ylabel('average gradient')\n plt.title('Gradient flow')\n plt.grid(True)\n plt.legend([Line2D([0], [0], color='c', lw=4), Line2D([0], [0], color=\n 'b', lw=4), Line2D([0], [0], color='k', lw=4)], ['max-gradient',\n 'mean-gradient', 'zero-gradient'])\n plt.show()\n\n\nlearning_rate = 1e-06\ny_pred = model(x)\nloss = (y_pred - y).pow(2).sum()\nloss.backward()\nplot_grad_flow(model.named_parameters())\n",
"<import token>\ndtype = torch.float\ndevice = torch.device('cpu')\nN, D_in, H, D_out = 64, 1000, 100, 10\nx = torch.randn(N, D_in, device=device, dtype=dtype)\ny = torch.randn(N, D_out, device=device, dtype=dtype)\nmodel = torch.nn.Sequential(torch.nn.Linear(D_in, H), torch.nn.ReLU(),\n torch.nn.Linear(H, D_out))\n\n\ndef plot_grad_flow(named_parameters):\n \"\"\"Plots the gradients flowing through different layers in the net during training.\n Can be used for checking for possible gradient vanishing / exploding problems.\n\n Usage: Plug this function in Trainer class after loss.backwards() as\n \"plot_grad_flow(self.model.named_parameters())\" to visualize the gradient flow\"\"\"\n ave_grads = []\n max_grads = []\n layers = []\n for n, p in named_parameters:\n if p.requires_grad and 'bias' not in n:\n layers.append(n)\n ave_grads.append(p.grad.abs().mean())\n max_grads.append(p.grad.abs().max())\n plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.1, lw=1, color='c')\n plt.bar(np.arange(len(max_grads)), ave_grads, alpha=0.1, lw=1, color='b')\n plt.hlines(0, 0, len(ave_grads) + 1, lw=2, color='k')\n plt.xticks(range(0, len(ave_grads), 1), layers, rotation='vertical')\n plt.xlim(left=0, right=len(ave_grads))\n plt.ylim(bottom=-0.001, top=0.02)\n plt.xlabel('Layers')\n plt.ylabel('average gradient')\n plt.title('Gradient flow')\n plt.grid(True)\n plt.legend([Line2D([0], [0], color='c', lw=4), Line2D([0], [0], color=\n 'b', lw=4), Line2D([0], [0], color='k', lw=4)], ['max-gradient',\n 'mean-gradient', 'zero-gradient'])\n plt.show()\n\n\nlearning_rate = 1e-06\ny_pred = model(x)\nloss = (y_pred - y).pow(2).sum()\nloss.backward()\nplot_grad_flow(model.named_parameters())\n",
"<import token>\n<assignment token>\n\n\ndef plot_grad_flow(named_parameters):\n \"\"\"Plots the gradients flowing through different layers in the net during training.\n Can be used for checking for possible gradient vanishing / exploding problems.\n\n Usage: Plug this function in Trainer class after loss.backwards() as\n \"plot_grad_flow(self.model.named_parameters())\" to visualize the gradient flow\"\"\"\n ave_grads = []\n max_grads = []\n layers = []\n for n, p in named_parameters:\n if p.requires_grad and 'bias' not in n:\n layers.append(n)\n ave_grads.append(p.grad.abs().mean())\n max_grads.append(p.grad.abs().max())\n plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.1, lw=1, color='c')\n plt.bar(np.arange(len(max_grads)), ave_grads, alpha=0.1, lw=1, color='b')\n plt.hlines(0, 0, len(ave_grads) + 1, lw=2, color='k')\n plt.xticks(range(0, len(ave_grads), 1), layers, rotation='vertical')\n plt.xlim(left=0, right=len(ave_grads))\n plt.ylim(bottom=-0.001, top=0.02)\n plt.xlabel('Layers')\n plt.ylabel('average gradient')\n plt.title('Gradient flow')\n plt.grid(True)\n plt.legend([Line2D([0], [0], color='c', lw=4), Line2D([0], [0], color=\n 'b', lw=4), Line2D([0], [0], color='k', lw=4)], ['max-gradient',\n 'mean-gradient', 'zero-gradient'])\n plt.show()\n\n\n<assignment token>\nloss.backward()\nplot_grad_flow(model.named_parameters())\n",
"<import token>\n<assignment token>\n\n\ndef plot_grad_flow(named_parameters):\n \"\"\"Plots the gradients flowing through different layers in the net during training.\n Can be used for checking for possible gradient vanishing / exploding problems.\n\n Usage: Plug this function in Trainer class after loss.backwards() as\n \"plot_grad_flow(self.model.named_parameters())\" to visualize the gradient flow\"\"\"\n ave_grads = []\n max_grads = []\n layers = []\n for n, p in named_parameters:\n if p.requires_grad and 'bias' not in n:\n layers.append(n)\n ave_grads.append(p.grad.abs().mean())\n max_grads.append(p.grad.abs().max())\n plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.1, lw=1, color='c')\n plt.bar(np.arange(len(max_grads)), ave_grads, alpha=0.1, lw=1, color='b')\n plt.hlines(0, 0, len(ave_grads) + 1, lw=2, color='k')\n plt.xticks(range(0, len(ave_grads), 1), layers, rotation='vertical')\n plt.xlim(left=0, right=len(ave_grads))\n plt.ylim(bottom=-0.001, top=0.02)\n plt.xlabel('Layers')\n plt.ylabel('average gradient')\n plt.title('Gradient flow')\n plt.grid(True)\n plt.legend([Line2D([0], [0], color='c', lw=4), Line2D([0], [0], color=\n 'b', lw=4), Line2D([0], [0], color='k', lw=4)], ['max-gradient',\n 'mean-gradient', 'zero-gradient'])\n plt.show()\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
9,759 |
09d32b48ae88b1066dd0aa435a351c4fb1fc04ec
|
from flask import Flask, request, render_template
from random import choice, sample
app = Flask(__name__)
horoscopes = [
'your day will be awesome',
'your day will be terrific',
'your day will be fantastic',
'neato, you have a fantabulous day ahead',
'your day will be oh-so-not-meh',
'this day will be brilliant',
'looks like today is just ducky',
'I proclaim your day to be INCREDIBLE',
'this day will be wonderful',
'smash this day',
'this day shall be lovely',
'your day will be just satenacious']
@app.route('/')
def index():
"""Show the homepage and ask the user's name."""
return render_template('index.html')
@app.route('/horoscope')
def get_horoscope():
"""Give the user a horoscope"""
name = request.args.get('name')
num_horoscopes = int(request.args.get('num_horoscopes'))
show_horoscopes = request.args.get('show_horoscopes')
horoscopes_to_show = sample(horoscopes, num_horoscopes)
# predictions = ', '.join(sample(horoscopes, num_horoscopes))
return render_template(
'horoscopes.html',
name=name,
show_horoscopes=show_horoscopes,
horoscopes_to_show=horoscopes_to_show))
"""
if show_horoscopes:
return f"Hello there, {name}: {predictions}."
else:
return f"Hello there, {name}! Have a nice day!"
"""
if __name__ == "__main__":
app.run(debug=True)
|
[
"from flask import Flask, request, render_template\nfrom random import choice, sample\n\napp = Flask(__name__)\n\nhoroscopes = [\n 'your day will be awesome',\n 'your day will be terrific',\n 'your day will be fantastic',\n 'neato, you have a fantabulous day ahead',\n 'your day will be oh-so-not-meh',\n 'this day will be brilliant',\n 'looks like today is just ducky',\n 'I proclaim your day to be INCREDIBLE',\n 'this day will be wonderful',\n 'smash this day',\n 'this day shall be lovely',\n 'your day will be just satenacious']\n\n\[email protected]('/')\ndef index():\n \"\"\"Show the homepage and ask the user's name.\"\"\"\n return render_template('index.html')\n\n\[email protected]('/horoscope')\ndef get_horoscope():\n \"\"\"Give the user a horoscope\"\"\"\n name = request.args.get('name')\n num_horoscopes = int(request.args.get('num_horoscopes'))\n show_horoscopes = request.args.get('show_horoscopes')\n horoscopes_to_show = sample(horoscopes, num_horoscopes)\n # predictions = ', '.join(sample(horoscopes, num_horoscopes))\n\n return render_template(\n 'horoscopes.html',\n name=name,\n show_horoscopes=show_horoscopes,\n horoscopes_to_show=horoscopes_to_show))\n\n\"\"\"\n if show_horoscopes:\n return f\"Hello there, {name}: {predictions}.\"\n else:\n return f\"Hello there, {name}! Have a nice day!\"\n\"\"\"\n\nif __name__ == \"__main__\":\n app.run(debug=True)\n"
] | true |
9,760 |
5a895c864c496e1073d75937909c994432a71d75
|
import socket
import json
from typing import Dict
listadionica = ["GS", "MS", "WFC", "VALBZ", "BOND", "VALE", "XLF"]
class Burza:
def __init__ (self, test):
if test:
host_name = "test-exch-partitivnisumari"
port = 25000
else:
host_name = "production"
port = 25000
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host_name, port))
self.stream = s.makefile('rw', 1)
self.zapisi(("type": "hello", "team": 'PARTITIVNISUMARI'))
assert self.citaj()['type'] == 'hello'
self.order_id = 0
def citaj(self, store_last=True):
data = self.stream.readline()
if(data == ""):
return None
else:
data = json.loads(data)
self.last_data = data
!!!
return data
def zapisi(self, data):
json.dump(data, self.stream)
self.stream.write("\n")
def kupi(self, buy_sell, symbol, price, size):
trade = {'type': 'add', 'order_id': self.order_id,
'symbol': symbol, 'dir': buy_sell, 'price': price, 'size': size}
self.order_id += 1
if buy_sell == "SELL":
self.zapisi(trade)
!!!
elif buy_sell == "BUY":
self.zapisi(trade)
!!!
def logger(dicc, ord):
if ord['type'] == 'book':
buy = ord['buy']
sell = ord['sell']
count_buy = 0
value_buy = 0
for p, n in buy:
value_buy += p * n
count_buy += n
count_sell = 0
value_sell = 0
for p, n in sell:
value_sell += p * n
count_sell += n
if count_buy != 0 and count_sell != 0:
dicc[ord['symbol']].append((value_buy//count_buy, value_sell//count_sell))
def logN(burza, n):
dicc = {}
readed_results = []
for i in range(n):
readed_results.append(burza.citaj())
for ord in readed_results:
if ord['type'] == 'book':
buy = ord['buy']
sell = ord['sell']
count_buy = 0
value_buy = 0
for p, n in buy:
value_buy +=
|
[
"import socket\nimport json\n\nfrom typing import Dict\n\nlistadionica = [\"GS\", \"MS\", \"WFC\", \"VALBZ\", \"BOND\", \"VALE\", \"XLF\"]\n\nclass Burza:\n def __init__ (self, test):\n\n if test:\n host_name = \"test-exch-partitivnisumari\"\n port = 25000\n else:\n host_name = \"production\"\n port = 25000\n\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host_name, port))\n self.stream = s.makefile('rw', 1)\n\n self.zapisi((\"type\": \"hello\", \"team\": 'PARTITIVNISUMARI'))\n assert self.citaj()['type'] == 'hello'\n self.order_id = 0\n\n def citaj(self, store_last=True):\n data = self.stream.readline()\n if(data == \"\"):\n return None\n else:\n data = json.loads(data)\n self.last_data = data\n !!!\n return data\n\n def zapisi(self, data):\n json.dump(data, self.stream)\n self.stream.write(\"\\n\")\n\n def kupi(self, buy_sell, symbol, price, size):\n trade = {'type': 'add', 'order_id': self.order_id,\n 'symbol': symbol, 'dir': buy_sell, 'price': price, 'size': size}\n self.order_id += 1\n\n if buy_sell == \"SELL\":\n self.zapisi(trade)\n !!!\n elif buy_sell == \"BUY\":\n self.zapisi(trade)\n !!!\n\ndef logger(dicc, ord):\n if ord['type'] == 'book':\n buy = ord['buy']\n sell = ord['sell']\n\n count_buy = 0\n value_buy = 0\n for p, n in buy:\n value_buy += p * n\n count_buy += n\n\n count_sell = 0\n value_sell = 0\n for p, n in sell:\n value_sell += p * n\n count_sell += n\n if count_buy != 0 and count_sell != 0:\n dicc[ord['symbol']].append((value_buy//count_buy, value_sell//count_sell))\n\ndef logN(burza, n):\n dicc = {}\n readed_results = []\n for i in range(n):\n readed_results.append(burza.citaj())\n for ord in readed_results:\n if ord['type'] == 'book':\n buy = ord['buy']\n sell = ord['sell']\n\n count_buy = 0\n value_buy = 0\n for p, n in buy:\n value_buy +="
] | true |
9,761 |
1ae69eaaa08a0045faad13281a6a3de8f7529c7a
|
# -*- coding: utf-8 -*-
import csv
import datetime
from django.conf import settings
from django.contrib import admin
from django.http import HttpResponse
from django.utils.encoding import smart_str
from djforms.scholars.models import *
def export_scholars(modeladmin, request, queryset):
"""Export the presentation data."""
response = HttpResponse('', content_type='text/csv; charset=utf-8')
response['Content-Disposition'] = 'attachment; filename=cos.csv'
writer = csv.writer(response)
writer.writerow([
'Title',
'Reviewer',
'Leader',
'Leader Email',
'Sponsor',
'Other Sponsor',
'Presenters',
'Funding Source',
'Work Type',
'Permission to Reproduce',
'Faculty Sponsor Approval',
'Table',
'Electricity',
'Link',
'Poster',
'Date created',
])
for presentation in queryset:
link = 'http://{0}{1}'.format(
settings.SERVER_URL,
presentation.get_absolute_url(),
)
poster = 'http://{0}/assets/{1}'.format(
settings.SERVER_URL, presentation.poster_file,
)
try:
leader = '{0}, {1}'.format(
presentation.leader.last_name,
presentation.leader.first_name,
)
except Exception:
leader = ''
presenters = ''
for presenter in presentation.presenters.all():
if not presenter.leader:
presenters += '{0}, {1}|'.format(
presenter.last_name, presenter.first_name,
)
title = smart_str(
presentation.title,
encoding='utf-8',
strings_only=False,
errors='strict',
)
funding = smart_str(
presentation.funding,
encoding='utf-8',
strings_only=False,
errors='strict',
)
work_type = smart_str(
presentation.work_type,
encoding='utf-8',
strings_only=False,
errors='strict',
)
sponsor_email = ''
if presentation.leader:
sponsor_email = presentation.leader.sponsor_email
sponsor_other = presentation.leader.sponsor_other
writer.writerow([
title,
presentation.reviewer,
leader,
presentation.user.email,
sponsor_email,
sponsor_other,
presenters[:-1],
funding,
work_type,
presentation.permission,
presentation.shared,
presentation.need_table,
presentation.need_electricity,
link,poster,
presentation.date_created,
])
return response
export_scholars.short_description = """
Export the selected Celebration of Scholars Submissions
"""
class PresentationAdmin(admin.ModelAdmin):
    """Admin configuration for the Presentation model."""

    model = Presentation
    actions = [export_scholars]
    raw_id_fields = ('user', 'updated_by', 'leader')
    list_max_show_all = 500
    list_per_page = 500
    list_display = (
        'title', 'reviewer', 'last_name', 'first_name', 'email',
        'sponsor', 'sponsor_other', 'get_presenters', 'funding',
        'work_type', 'permission', 'shared', 'need_table',
        'need_electricity', 'status', 'poster', 'date_created',
    )
    ordering = [
        '-date_created', 'title', 'work_type', 'permission',
        'shared', 'need_table', 'need_electricity', 'status',
    ]
    search_fields = ('title', 'user__last_name', 'user__email', 'funding')
    list_filter = ('status', 'date_created')
    list_editable = ['reviewer']

    def queryset(self, request):
        """Limit the changelist to submissions created this calendar year."""
        year_start = datetime.date(datetime.date.today().year, 1, 1)
        base = super(PresentationAdmin, self).queryset(request)
        return base.filter(date_created__gte=year_start)

    def save_model(self, request, obj, form, change):
        """Record which admin user last modified an existing submission."""
        if change:
            obj.updated_by = request.user
        obj.save()
class PresenterAdmin(admin.ModelAdmin):
    """Admin configuration for the Presenter model."""

    model = Presenter
    list_max_show_all = 500
    list_per_page = 500
    list_display = (
        'date_created', 'last_name', 'first_name', 'email', 'leader',
        'prez_type', 'college_year', 'major', 'hometown', 'sponsor',
        'sponsor_name', 'sponsor_email', 'sponsor_other', 'department',
    )
    # The changelist is ordered by the same columns it displays.
    ordering = list(list_display)
    search_fields = ('last_name', 'first_name', 'email')
# Register both Celebration of Scholars models with the admin site.
admin.site.register(Presenter, PresenterAdmin)
admin.site.register(Presentation, PresentationAdmin)
|
[
"# -*- coding: utf-8 -*-\r\n\r\nimport csv\r\nimport datetime\r\n\r\nfrom django.conf import settings\r\nfrom django.contrib import admin\r\nfrom django.http import HttpResponse\r\nfrom django.utils.encoding import smart_str\r\nfrom djforms.scholars.models import *\r\n\r\n\r\ndef export_scholars(modeladmin, request, queryset):\r\n \"\"\"Export the presentation data.\"\"\"\r\n response = HttpResponse('', content_type='text/csv; charset=utf-8')\r\n response['Content-Disposition'] = 'attachment; filename=cos.csv'\r\n writer = csv.writer(response)\r\n writer.writerow([\r\n 'Title',\r\n 'Reviewer',\r\n 'Leader',\r\n 'Leader Email',\r\n 'Sponsor',\r\n 'Other Sponsor',\r\n 'Presenters',\r\n 'Funding Source',\r\n 'Work Type',\r\n 'Permission to Reproduce',\r\n 'Faculty Sponsor Approval',\r\n 'Table',\r\n 'Electricity',\r\n 'Link',\r\n 'Poster',\r\n 'Date created',\r\n ])\r\n for presentation in queryset:\r\n link = 'http://{0}{1}'.format(\r\n settings.SERVER_URL,\r\n presentation.get_absolute_url(),\r\n )\r\n poster = 'http://{0}/assets/{1}'.format(\r\n settings.SERVER_URL, presentation.poster_file,\r\n )\r\n try:\r\n leader = '{0}, {1}'.format(\r\n presentation.leader.last_name,\r\n presentation.leader.first_name,\r\n )\r\n except Exception:\r\n leader = ''\r\n presenters = ''\r\n for presenter in presentation.presenters.all():\r\n if not presenter.leader:\r\n presenters += '{0}, {1}|'.format(\r\n presenter.last_name, presenter.first_name,\r\n )\r\n title = smart_str(\r\n presentation.title,\r\n encoding='utf-8',\r\n strings_only=False,\r\n errors='strict',\r\n )\r\n funding = smart_str(\r\n presentation.funding,\r\n encoding='utf-8',\r\n strings_only=False,\r\n errors='strict',\r\n )\r\n work_type = smart_str(\r\n presentation.work_type,\r\n encoding='utf-8',\r\n strings_only=False,\r\n errors='strict',\r\n )\r\n sponsor_email = ''\r\n if presentation.leader:\r\n sponsor_email = presentation.leader.sponsor_email\r\n sponsor_other = presentation.leader.sponsor_other\r\n 
writer.writerow([\r\n title,\r\n presentation.reviewer,\r\n leader,\r\n presentation.user.email,\r\n sponsor_email,\r\n sponsor_other,\r\n presenters[:-1],\r\n funding,\r\n work_type,\r\n presentation.permission,\r\n presentation.shared,\r\n presentation.need_table,\r\n presentation.need_electricity,\r\n link,poster,\r\n presentation.date_created,\r\n ])\r\n return response\r\nexport_scholars.short_description = \"\"\"\r\n Export the selected Celebration of Scholars Submissions\r\n\"\"\"\r\n\r\n\r\nclass PresentationAdmin(admin.ModelAdmin):\r\n \"\"\"Admin class for the presentation data model.\"\"\"\r\n\r\n model = Presentation\r\n actions = [export_scholars]\r\n raw_id_fields = ('user', 'updated_by', 'leader')\r\n list_max_show_all = 500\r\n list_per_page = 500\r\n list_display = (\r\n 'title',\r\n 'reviewer',\r\n 'last_name',\r\n 'first_name',\r\n 'email',\r\n 'sponsor',\r\n 'sponsor_other',\r\n 'get_presenters',\r\n 'funding',\r\n 'work_type',\r\n 'permission',\r\n 'shared',\r\n 'need_table',\r\n 'need_electricity',\r\n 'status',\r\n 'poster',\r\n 'date_created',\r\n )\r\n ordering = [\r\n '-date_created',\r\n 'title',\r\n 'work_type',\r\n 'permission',\r\n 'shared',\r\n 'need_table',\r\n 'need_electricity',\r\n 'status',\r\n ]\r\n search_fields = (\r\n 'title',\r\n 'user__last_name',\r\n 'user__email',\r\n 'funding',\r\n )\r\n list_filter = ('status', 'date_created')\r\n list_editable = ['reviewer']\r\n\r\n def queryset(self, request):\r\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\r\n TODAY = datetime.date.today()\r\n YEAR = int(TODAY.year)\r\n qs = super(PresentationAdmin, self).queryset(request)\r\n start_date = datetime.date(YEAR, 1, 1)\r\n return qs.filter(date_created__gte=start_date)\r\n\r\n def save_model(self, request, obj, form, change):\r\n \"\"\"Override the save method to update some things.\"\"\"\r\n if change:\r\n obj.updated_by = request.user\r\n obj.save()\r\n\r\n\r\nclass PresenterAdmin(admin.ModelAdmin):\r\n 
\"\"\"Admin class for the presenter model.\"\"\"\r\n\r\n model = Presenter\r\n list_max_show_all = 500\r\n list_per_page = 500\r\n list_display = (\r\n 'date_created',\r\n 'last_name',\r\n 'first_name',\r\n 'email',\r\n 'leader',\r\n 'prez_type',\r\n 'college_year',\r\n 'major',\r\n 'hometown',\r\n 'sponsor',\r\n 'sponsor_name',\r\n 'sponsor_email',\r\n 'sponsor_other',\r\n 'department',\r\n )\r\n ordering = [\r\n 'date_created',\r\n 'last_name',\r\n 'first_name',\r\n 'email',\r\n 'leader',\r\n 'prez_type',\r\n 'college_year',\r\n 'major',\r\n 'hometown',\r\n 'sponsor',\r\n 'sponsor_name',\r\n 'sponsor_email',\r\n 'sponsor_other',\r\n 'department',\r\n ]\r\n search_fields = (\r\n 'last_name',\r\n 'first_name',\r\n 'email',\r\n )\r\n\r\n\r\nadmin.site.register(Presenter, PresenterAdmin)\r\nadmin.site.register(Presentation, PresentationAdmin)\r\n",
"import csv\nimport datetime\nfrom django.conf import settings\nfrom django.contrib import admin\nfrom django.http import HttpResponse\nfrom django.utils.encoding import smart_str\nfrom djforms.scholars.models import *\n\n\ndef export_scholars(modeladmin, request, queryset):\n \"\"\"Export the presentation data.\"\"\"\n response = HttpResponse('', content_type='text/csv; charset=utf-8')\n response['Content-Disposition'] = 'attachment; filename=cos.csv'\n writer = csv.writer(response)\n writer.writerow(['Title', 'Reviewer', 'Leader', 'Leader Email',\n 'Sponsor', 'Other Sponsor', 'Presenters', 'Funding Source',\n 'Work Type', 'Permission to Reproduce', 'Faculty Sponsor Approval',\n 'Table', 'Electricity', 'Link', 'Poster', 'Date created'])\n for presentation in queryset:\n link = 'http://{0}{1}'.format(settings.SERVER_URL, presentation.\n get_absolute_url())\n poster = 'http://{0}/assets/{1}'.format(settings.SERVER_URL,\n presentation.poster_file)\n try:\n leader = '{0}, {1}'.format(presentation.leader.last_name,\n presentation.leader.first_name)\n except Exception:\n leader = ''\n presenters = ''\n for presenter in presentation.presenters.all():\n if not presenter.leader:\n presenters += '{0}, {1}|'.format(presenter.last_name,\n presenter.first_name)\n title = smart_str(presentation.title, encoding='utf-8',\n strings_only=False, errors='strict')\n funding = smart_str(presentation.funding, encoding='utf-8',\n strings_only=False, errors='strict')\n work_type = smart_str(presentation.work_type, encoding='utf-8',\n strings_only=False, errors='strict')\n sponsor_email = ''\n if presentation.leader:\n sponsor_email = presentation.leader.sponsor_email\n sponsor_other = presentation.leader.sponsor_other\n writer.writerow([title, presentation.reviewer, leader, presentation\n .user.email, sponsor_email, sponsor_other, presenters[:-1],\n funding, work_type, presentation.permission, presentation.\n shared, presentation.need_table, presentation.need_electricity,\n link, poster, 
presentation.date_created])\n return response\n\n\nexport_scholars.short_description = \"\"\"\n Export the selected Celebration of Scholars Submissions\n\"\"\"\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presentation data model.\"\"\"\n model = Presentation\n actions = [export_scholars]\n raw_id_fields = 'user', 'updated_by', 'leader'\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('title', 'reviewer', 'last_name', 'first_name', 'email',\n 'sponsor', 'sponsor_other', 'get_presenters', 'funding',\n 'work_type', 'permission', 'shared', 'need_table',\n 'need_electricity', 'status', 'poster', 'date_created')\n ordering = ['-date_created', 'title', 'work_type', 'permission',\n 'shared', 'need_table', 'need_electricity', 'status']\n search_fields = 'title', 'user__last_name', 'user__email', 'funding'\n list_filter = 'status', 'date_created'\n list_editable = ['reviewer']\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n\n def save_model(self, request, obj, form, change):\n \"\"\"Override the save method to update some things.\"\"\"\n if change:\n obj.updated_by = request.user\n obj.save()\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n 
search_fields = 'last_name', 'first_name', 'email'\n\n\nadmin.site.register(Presenter, PresenterAdmin)\nadmin.site.register(Presentation, PresentationAdmin)\n",
"<import token>\n\n\ndef export_scholars(modeladmin, request, queryset):\n \"\"\"Export the presentation data.\"\"\"\n response = HttpResponse('', content_type='text/csv; charset=utf-8')\n response['Content-Disposition'] = 'attachment; filename=cos.csv'\n writer = csv.writer(response)\n writer.writerow(['Title', 'Reviewer', 'Leader', 'Leader Email',\n 'Sponsor', 'Other Sponsor', 'Presenters', 'Funding Source',\n 'Work Type', 'Permission to Reproduce', 'Faculty Sponsor Approval',\n 'Table', 'Electricity', 'Link', 'Poster', 'Date created'])\n for presentation in queryset:\n link = 'http://{0}{1}'.format(settings.SERVER_URL, presentation.\n get_absolute_url())\n poster = 'http://{0}/assets/{1}'.format(settings.SERVER_URL,\n presentation.poster_file)\n try:\n leader = '{0}, {1}'.format(presentation.leader.last_name,\n presentation.leader.first_name)\n except Exception:\n leader = ''\n presenters = ''\n for presenter in presentation.presenters.all():\n if not presenter.leader:\n presenters += '{0}, {1}|'.format(presenter.last_name,\n presenter.first_name)\n title = smart_str(presentation.title, encoding='utf-8',\n strings_only=False, errors='strict')\n funding = smart_str(presentation.funding, encoding='utf-8',\n strings_only=False, errors='strict')\n work_type = smart_str(presentation.work_type, encoding='utf-8',\n strings_only=False, errors='strict')\n sponsor_email = ''\n if presentation.leader:\n sponsor_email = presentation.leader.sponsor_email\n sponsor_other = presentation.leader.sponsor_other\n writer.writerow([title, presentation.reviewer, leader, presentation\n .user.email, sponsor_email, sponsor_other, presenters[:-1],\n funding, work_type, presentation.permission, presentation.\n shared, presentation.need_table, presentation.need_electricity,\n link, poster, presentation.date_created])\n return response\n\n\nexport_scholars.short_description = \"\"\"\n Export the selected Celebration of Scholars Submissions\n\"\"\"\n\n\nclass 
PresentationAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presentation data model.\"\"\"\n model = Presentation\n actions = [export_scholars]\n raw_id_fields = 'user', 'updated_by', 'leader'\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('title', 'reviewer', 'last_name', 'first_name', 'email',\n 'sponsor', 'sponsor_other', 'get_presenters', 'funding',\n 'work_type', 'permission', 'shared', 'need_table',\n 'need_electricity', 'status', 'poster', 'date_created')\n ordering = ['-date_created', 'title', 'work_type', 'permission',\n 'shared', 'need_table', 'need_electricity', 'status']\n search_fields = 'title', 'user__last_name', 'user__email', 'funding'\n list_filter = 'status', 'date_created'\n list_editable = ['reviewer']\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n\n def save_model(self, request, obj, form, change):\n \"\"\"Override the save method to update some things.\"\"\"\n if change:\n obj.updated_by = request.user\n obj.save()\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\nadmin.site.register(Presenter, PresenterAdmin)\nadmin.site.register(Presentation, PresentationAdmin)\n",
"<import token>\n\n\ndef export_scholars(modeladmin, request, queryset):\n \"\"\"Export the presentation data.\"\"\"\n response = HttpResponse('', content_type='text/csv; charset=utf-8')\n response['Content-Disposition'] = 'attachment; filename=cos.csv'\n writer = csv.writer(response)\n writer.writerow(['Title', 'Reviewer', 'Leader', 'Leader Email',\n 'Sponsor', 'Other Sponsor', 'Presenters', 'Funding Source',\n 'Work Type', 'Permission to Reproduce', 'Faculty Sponsor Approval',\n 'Table', 'Electricity', 'Link', 'Poster', 'Date created'])\n for presentation in queryset:\n link = 'http://{0}{1}'.format(settings.SERVER_URL, presentation.\n get_absolute_url())\n poster = 'http://{0}/assets/{1}'.format(settings.SERVER_URL,\n presentation.poster_file)\n try:\n leader = '{0}, {1}'.format(presentation.leader.last_name,\n presentation.leader.first_name)\n except Exception:\n leader = ''\n presenters = ''\n for presenter in presentation.presenters.all():\n if not presenter.leader:\n presenters += '{0}, {1}|'.format(presenter.last_name,\n presenter.first_name)\n title = smart_str(presentation.title, encoding='utf-8',\n strings_only=False, errors='strict')\n funding = smart_str(presentation.funding, encoding='utf-8',\n strings_only=False, errors='strict')\n work_type = smart_str(presentation.work_type, encoding='utf-8',\n strings_only=False, errors='strict')\n sponsor_email = ''\n if presentation.leader:\n sponsor_email = presentation.leader.sponsor_email\n sponsor_other = presentation.leader.sponsor_other\n writer.writerow([title, presentation.reviewer, leader, presentation\n .user.email, sponsor_email, sponsor_other, presenters[:-1],\n funding, work_type, presentation.permission, presentation.\n shared, presentation.need_table, presentation.need_electricity,\n link, poster, presentation.date_created])\n return response\n\n\n<assignment token>\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presentation data model.\"\"\"\n model = Presentation\n 
actions = [export_scholars]\n raw_id_fields = 'user', 'updated_by', 'leader'\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('title', 'reviewer', 'last_name', 'first_name', 'email',\n 'sponsor', 'sponsor_other', 'get_presenters', 'funding',\n 'work_type', 'permission', 'shared', 'need_table',\n 'need_electricity', 'status', 'poster', 'date_created')\n ordering = ['-date_created', 'title', 'work_type', 'permission',\n 'shared', 'need_table', 'need_electricity', 'status']\n search_fields = 'title', 'user__last_name', 'user__email', 'funding'\n list_filter = 'status', 'date_created'\n list_editable = ['reviewer']\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n\n def save_model(self, request, obj, form, change):\n \"\"\"Override the save method to update some things.\"\"\"\n if change:\n obj.updated_by = request.user\n obj.save()\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\nadmin.site.register(Presenter, PresenterAdmin)\nadmin.site.register(Presentation, PresentationAdmin)\n",
"<import token>\n\n\ndef export_scholars(modeladmin, request, queryset):\n \"\"\"Export the presentation data.\"\"\"\n response = HttpResponse('', content_type='text/csv; charset=utf-8')\n response['Content-Disposition'] = 'attachment; filename=cos.csv'\n writer = csv.writer(response)\n writer.writerow(['Title', 'Reviewer', 'Leader', 'Leader Email',\n 'Sponsor', 'Other Sponsor', 'Presenters', 'Funding Source',\n 'Work Type', 'Permission to Reproduce', 'Faculty Sponsor Approval',\n 'Table', 'Electricity', 'Link', 'Poster', 'Date created'])\n for presentation in queryset:\n link = 'http://{0}{1}'.format(settings.SERVER_URL, presentation.\n get_absolute_url())\n poster = 'http://{0}/assets/{1}'.format(settings.SERVER_URL,\n presentation.poster_file)\n try:\n leader = '{0}, {1}'.format(presentation.leader.last_name,\n presentation.leader.first_name)\n except Exception:\n leader = ''\n presenters = ''\n for presenter in presentation.presenters.all():\n if not presenter.leader:\n presenters += '{0}, {1}|'.format(presenter.last_name,\n presenter.first_name)\n title = smart_str(presentation.title, encoding='utf-8',\n strings_only=False, errors='strict')\n funding = smart_str(presentation.funding, encoding='utf-8',\n strings_only=False, errors='strict')\n work_type = smart_str(presentation.work_type, encoding='utf-8',\n strings_only=False, errors='strict')\n sponsor_email = ''\n if presentation.leader:\n sponsor_email = presentation.leader.sponsor_email\n sponsor_other = presentation.leader.sponsor_other\n writer.writerow([title, presentation.reviewer, leader, presentation\n .user.email, sponsor_email, sponsor_other, presenters[:-1],\n funding, work_type, presentation.permission, presentation.\n shared, presentation.need_table, presentation.need_electricity,\n link, poster, presentation.date_created])\n return response\n\n\n<assignment token>\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presentation data model.\"\"\"\n model = Presentation\n 
actions = [export_scholars]\n raw_id_fields = 'user', 'updated_by', 'leader'\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('title', 'reviewer', 'last_name', 'first_name', 'email',\n 'sponsor', 'sponsor_other', 'get_presenters', 'funding',\n 'work_type', 'permission', 'shared', 'need_table',\n 'need_electricity', 'status', 'poster', 'date_created')\n ordering = ['-date_created', 'title', 'work_type', 'permission',\n 'shared', 'need_table', 'need_electricity', 'status']\n search_fields = 'title', 'user__last_name', 'user__email', 'funding'\n list_filter = 'status', 'date_created'\n list_editable = ['reviewer']\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n\n def save_model(self, request, obj, form, change):\n \"\"\"Override the save method to update some things.\"\"\"\n if change:\n obj.updated_by = request.user\n obj.save()\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presentation data model.\"\"\"\n model = Presentation\n actions = [export_scholars]\n raw_id_fields = 'user', 'updated_by', 'leader'\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('title', 'reviewer', 'last_name', 'first_name', 'email',\n 'sponsor', 'sponsor_other', 'get_presenters', 'funding',\n 'work_type', 'permission', 'shared', 'need_table',\n 'need_electricity', 'status', 'poster', 'date_created')\n ordering = ['-date_created', 'title', 'work_type', 'permission',\n 'shared', 'need_table', 'need_electricity', 'status']\n search_fields = 'title', 'user__last_name', 'user__email', 'funding'\n list_filter = 'status', 'date_created'\n list_editable = ['reviewer']\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n\n def save_model(self, request, obj, form, change):\n \"\"\"Override the save method to update some things.\"\"\"\n if change:\n obj.updated_by = request.user\n obj.save()\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n <docstring token>\n model = Presentation\n actions = [export_scholars]\n raw_id_fields = 'user', 'updated_by', 'leader'\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('title', 'reviewer', 'last_name', 'first_name', 'email',\n 'sponsor', 'sponsor_other', 'get_presenters', 'funding',\n 'work_type', 'permission', 'shared', 'need_table',\n 'need_electricity', 'status', 'poster', 'date_created')\n ordering = ['-date_created', 'title', 'work_type', 'permission',\n 'shared', 'need_table', 'need_electricity', 'status']\n search_fields = 'title', 'user__last_name', 'user__email', 'funding'\n list_filter = 'status', 'date_created'\n list_editable = ['reviewer']\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n\n def save_model(self, request, obj, form, change):\n \"\"\"Override the save method to update some things.\"\"\"\n if change:\n obj.updated_by = request.user\n obj.save()\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n\n def save_model(self, request, obj, form, change):\n \"\"\"Override the save method to update some things.\"\"\"\n if change:\n obj.updated_by = request.user\n obj.save()\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def queryset(self, request):\n \"\"\"Only show presentations that were created after a certain date.\"\"\"\n TODAY = datetime.date.today()\n YEAR = int(TODAY.year)\n qs = super(PresentationAdmin, self).queryset(request)\n start_date = datetime.date(YEAR, 1, 1)\n return qs.filter(date_created__gte=start_date)\n <function token>\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n\n\nclass PresentationAdmin(admin.ModelAdmin):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n<class token>\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n \"\"\"Admin class for the presenter model.\"\"\"\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n<class token>\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n <docstring token>\n model = Presenter\n list_max_show_all = 500\n list_per_page = 500\n list_display = ('date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department')\n ordering = ['date_created', 'last_name', 'first_name', 'email',\n 'leader', 'prez_type', 'college_year', 'major', 'hometown',\n 'sponsor', 'sponsor_name', 'sponsor_email', 'sponsor_other',\n 'department']\n search_fields = 'last_name', 'first_name', 'email'\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n<class token>\n\n\nclass PresenterAdmin(admin.ModelAdmin):\n <docstring token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<code token>\n",
"<import token>\n<function token>\n<assignment token>\n<class token>\n<class token>\n<code token>\n"
] | false |
9,762 |
0e73153d004137d374637abf70faffabf0bab1fb
|
# Generated by Django 3.1 on 2020-09-09 15:58
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('orders', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='orderproduct',
old_name='products',
new_name='product',
),
]
|
[
"# Generated by Django 3.1 on 2020-09-09 15:58\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('orders', '0001_initial'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='orderproduct',\n old_name='products',\n new_name='product',\n ),\n ]\n",
"from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('orders', '0001_initial')]\n operations = [migrations.RenameField(model_name='orderproduct',\n old_name='products', new_name='product')]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('orders', '0001_initial')]\n operations = [migrations.RenameField(model_name='orderproduct',\n old_name='products', new_name='product')]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
9,763 |
398f9f52b83ffddfb452abbeaad2e83610580fee
|
# -*- coding: utf-8 -*-
# project: fshell
# author: s0nnet
# time: 2017-01-08
# desc: data_fuzzhash
import sys
sys.path.append("./dao")
from fss_data_fuzzhash_dao import *
class FssFuzzHash:
@staticmethod
def insert_node(agent_id, data):
return FssFuzzHashDao.insert_node(agent_id, data)
|
[
"# -*- coding: utf-8 -*-\n\n# project: fshell\n# author: s0nnet\n# time: 2017-01-08\n# desc: data_fuzzhash\n\n\nimport sys\nsys.path.append(\"./dao\")\nfrom fss_data_fuzzhash_dao import *\n\n\nclass FssFuzzHash:\n \n @staticmethod\n def insert_node(agent_id, data):\n\n return FssFuzzHashDao.insert_node(agent_id, data)\n",
"import sys\nsys.path.append('./dao')\nfrom fss_data_fuzzhash_dao import *\n\n\nclass FssFuzzHash:\n\n @staticmethod\n def insert_node(agent_id, data):\n return FssFuzzHashDao.insert_node(agent_id, data)\n",
"<import token>\nsys.path.append('./dao')\n<import token>\n\n\nclass FssFuzzHash:\n\n @staticmethod\n def insert_node(agent_id, data):\n return FssFuzzHashDao.insert_node(agent_id, data)\n",
"<import token>\n<code token>\n<import token>\n\n\nclass FssFuzzHash:\n\n @staticmethod\n def insert_node(agent_id, data):\n return FssFuzzHashDao.insert_node(agent_id, data)\n",
"<import token>\n<code token>\n<import token>\n\n\nclass FssFuzzHash:\n <function token>\n",
"<import token>\n<code token>\n<import token>\n<class token>\n"
] | false |
9,764 |
ac5c6a534d5131438d9590b070e6b392d4ebed0c
|
from pynhost.grammars import extension
from pynhost.grammars import baseutils as bu
class AtomExtensionGrammar(extension.ExtensionGrammar):
activate = '{ctrl+alt+8}'
search_chars = bu.merge_dicts(bu.OPERATORS, bu.ALPHABET, bu.CHAR_MAP)
def __init__(self):
super().__init__()
self.app_context = 'Autumntastic'
self.mappings = {
}
|
[
"from pynhost.grammars import extension\nfrom pynhost.grammars import baseutils as bu\n\nclass AtomExtensionGrammar(extension.ExtensionGrammar):\n\n activate = '{ctrl+alt+8}'\n search_chars = bu.merge_dicts(bu.OPERATORS, bu.ALPHABET, bu.CHAR_MAP)\n\n def __init__(self):\n super().__init__()\n self.app_context = 'Autumntastic'\n self.mappings = {\n }\n",
"from pynhost.grammars import extension\nfrom pynhost.grammars import baseutils as bu\n\n\nclass AtomExtensionGrammar(extension.ExtensionGrammar):\n activate = '{ctrl+alt+8}'\n search_chars = bu.merge_dicts(bu.OPERATORS, bu.ALPHABET, bu.CHAR_MAP)\n\n def __init__(self):\n super().__init__()\n self.app_context = 'Autumntastic'\n self.mappings = {}\n",
"<import token>\n\n\nclass AtomExtensionGrammar(extension.ExtensionGrammar):\n activate = '{ctrl+alt+8}'\n search_chars = bu.merge_dicts(bu.OPERATORS, bu.ALPHABET, bu.CHAR_MAP)\n\n def __init__(self):\n super().__init__()\n self.app_context = 'Autumntastic'\n self.mappings = {}\n",
"<import token>\n\n\nclass AtomExtensionGrammar(extension.ExtensionGrammar):\n <assignment token>\n <assignment token>\n\n def __init__(self):\n super().__init__()\n self.app_context = 'Autumntastic'\n self.mappings = {}\n",
"<import token>\n\n\nclass AtomExtensionGrammar(extension.ExtensionGrammar):\n <assignment token>\n <assignment token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
9,765 |
3aa8c9b39174f0ed5799d6991516b34ca669b7d6
|
from django.db import models # db에 있는 models을 가져옴
from django.utils import timezone # 유틸에 있는 timezone을 가져옴
# Create your models here.
class Post(models.Model):
# Post라는 객체를 정의함 인수로 장고모델을 가져왔음
# 장고모델이기 때문에 데이터베이스에 저장된다.
author = models.ForeignKey('auth.User') # 외래키, 다른 객체에 대한 링크
title = models.CharField(max_length=200) # 글자수 제한
text = models.TextField() # 글자수제한없음
created_date = models.DateTimeField(default=timezone.now) # Date형식
published_date = models.DateTimeField(blank=True, null=True)
def publish(self): # 파이썬의 메소드
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
class User(models.Model):
id = models.CharField(max_length=30, primary_key='true')
password = models.CharField(max_length=50)
reg_date = models.DateField(default=timezone.now)
upt_date = models.DateField(default=timezone.now)
last_pwd = models.CharField(max_length=50)
def chg_password(self):
self.last_pwd = self.password
self.save()
def __id__(self):
return self.id
|
[
"from django.db import models # db에 있는 models을 가져옴\nfrom django.utils import timezone # 유틸에 있는 timezone을 가져옴\n\n\n# Create your models here.\n\nclass Post(models.Model):\n # Post라는 객체를 정의함 인수로 장고모델을 가져왔음\n # 장고모델이기 때문에 데이터베이스에 저장된다.\n author = models.ForeignKey('auth.User') # 외래키, 다른 객체에 대한 링크\n title = models.CharField(max_length=200) # 글자수 제한\n text = models.TextField() # 글자수제한없음\n created_date = models.DateTimeField(default=timezone.now) # Date형식\n published_date = models.DateTimeField(blank=True, null=True)\n\n def publish(self): # 파이썬의 메소드\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title\n\n\nclass User(models.Model):\n id = models.CharField(max_length=30, primary_key='true')\n password = models.CharField(max_length=50)\n reg_date = models.DateField(default=timezone.now)\n upt_date = models.DateField(default=timezone.now)\n last_pwd = models.CharField(max_length=50)\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n\n",
"from django.db import models\nfrom django.utils import timezone\n\n\nclass Post(models.Model):\n author = models.ForeignKey('auth.User')\n title = models.CharField(max_length=200)\n text = models.TextField()\n created_date = models.DateTimeField(default=timezone.now)\n published_date = models.DateTimeField(blank=True, null=True)\n\n def publish(self):\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title\n\n\nclass User(models.Model):\n id = models.CharField(max_length=30, primary_key='true')\n password = models.CharField(max_length=50)\n reg_date = models.DateField(default=timezone.now)\n upt_date = models.DateField(default=timezone.now)\n last_pwd = models.CharField(max_length=50)\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n",
"<import token>\n\n\nclass Post(models.Model):\n author = models.ForeignKey('auth.User')\n title = models.CharField(max_length=200)\n text = models.TextField()\n created_date = models.DateTimeField(default=timezone.now)\n published_date = models.DateTimeField(blank=True, null=True)\n\n def publish(self):\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title\n\n\nclass User(models.Model):\n id = models.CharField(max_length=30, primary_key='true')\n password = models.CharField(max_length=50)\n reg_date = models.DateField(default=timezone.now)\n upt_date = models.DateField(default=timezone.now)\n last_pwd = models.CharField(max_length=50)\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n",
"<import token>\n\n\nclass Post(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def publish(self):\n self.published_date = timezone.now()\n self.save()\n\n def __str__(self):\n return self.title\n\n\nclass User(models.Model):\n id = models.CharField(max_length=30, primary_key='true')\n password = models.CharField(max_length=50)\n reg_date = models.DateField(default=timezone.now)\n upt_date = models.DateField(default=timezone.now)\n last_pwd = models.CharField(max_length=50)\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n",
"<import token>\n\n\nclass Post(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def __str__(self):\n return self.title\n\n\nclass User(models.Model):\n id = models.CharField(max_length=30, primary_key='true')\n password = models.CharField(max_length=50)\n reg_date = models.DateField(default=timezone.now)\n upt_date = models.DateField(default=timezone.now)\n last_pwd = models.CharField(max_length=50)\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n",
"<import token>\n\n\nclass Post(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n\nclass User(models.Model):\n id = models.CharField(max_length=30, primary_key='true')\n password = models.CharField(max_length=50)\n reg_date = models.DateField(default=timezone.now)\n upt_date = models.DateField(default=timezone.now)\n last_pwd = models.CharField(max_length=50)\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n",
"<import token>\n<class token>\n\n\nclass User(models.Model):\n id = models.CharField(max_length=30, primary_key='true')\n password = models.CharField(max_length=50)\n reg_date = models.DateField(default=timezone.now)\n upt_date = models.DateField(default=timezone.now)\n last_pwd = models.CharField(max_length=50)\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n",
"<import token>\n<class token>\n\n\nclass User(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def chg_password(self):\n self.last_pwd = self.password\n self.save()\n\n def __id__(self):\n return self.id\n",
"<import token>\n<class token>\n\n\nclass User(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n def __id__(self):\n return self.id\n",
"<import token>\n<class token>\n\n\nclass User(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n"
] | false |
9,766 |
1fbdb0b40f0d65fffec482b63aa2192968b01d4b
|
#define the simple_divide function here
def simple_divide(item, denom):
# start a try-except block
try:
return item/denom
except ZeroDivisionError:
return 0
def fancy_divide(list_of_numbers, index):
denom = list_of_numbers[index]
return [simple_divide(item, denom) for item in list_of_numbers]
def main():
data = input()
l=data.split()
l1=[]
for j in l:
l1.append(float(j))
s=input()
index=int(s)
print(fancy_divide(l1,index))
if __name__== "__main__":
main()
|
[
"#define the simple_divide function here\ndef simple_divide(item, denom):\n # start a try-except block\n try:\n return item/denom\n except ZeroDivisionError:\n return 0\n \ndef fancy_divide(list_of_numbers, index):\n denom = list_of_numbers[index]\n return [simple_divide(item, denom) for item in list_of_numbers]\n\ndef main():\n data = input()\n l=data.split()\n l1=[]\n for j in l:\n l1.append(float(j))\n s=input()\n index=int(s)\n print(fancy_divide(l1,index))\nif __name__== \"__main__\":\n main()\n",
"def simple_divide(item, denom):\n try:\n return item / denom\n except ZeroDivisionError:\n return 0\n\n\ndef fancy_divide(list_of_numbers, index):\n denom = list_of_numbers[index]\n return [simple_divide(item, denom) for item in list_of_numbers]\n\n\ndef main():\n data = input()\n l = data.split()\n l1 = []\n for j in l:\n l1.append(float(j))\n s = input()\n index = int(s)\n print(fancy_divide(l1, index))\n\n\nif __name__ == '__main__':\n main()\n",
"def simple_divide(item, denom):\n try:\n return item / denom\n except ZeroDivisionError:\n return 0\n\n\ndef fancy_divide(list_of_numbers, index):\n denom = list_of_numbers[index]\n return [simple_divide(item, denom) for item in list_of_numbers]\n\n\ndef main():\n data = input()\n l = data.split()\n l1 = []\n for j in l:\n l1.append(float(j))\n s = input()\n index = int(s)\n print(fancy_divide(l1, index))\n\n\n<code token>\n",
"def simple_divide(item, denom):\n try:\n return item / denom\n except ZeroDivisionError:\n return 0\n\n\n<function token>\n\n\ndef main():\n data = input()\n l = data.split()\n l1 = []\n for j in l:\n l1.append(float(j))\n s = input()\n index = int(s)\n print(fancy_divide(l1, index))\n\n\n<code token>\n",
"def simple_divide(item, denom):\n try:\n return item / denom\n except ZeroDivisionError:\n return 0\n\n\n<function token>\n<function token>\n<code token>\n",
"<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
9,767 |
9e511c769f6ccedc06845a382171fb3729913d05
|
import generic
name = __name__
def options(opt):
generic._options(opt, name)
def configure(cfg):
generic._configure(cfg, name, incs=('czmq.h',), libs=('czmq',),
pcname = name.lower(),
uses = 'LIBZMQ', mandatory=True)
|
[
"import generic\n\nname = __name__\n\ndef options(opt):\n generic._options(opt, name)\n\ndef configure(cfg):\n generic._configure(cfg, name, incs=('czmq.h',), libs=('czmq',),\n pcname = name.lower(),\n uses = 'LIBZMQ', mandatory=True)\n",
"import generic\nname = __name__\n\n\ndef options(opt):\n generic._options(opt, name)\n\n\ndef configure(cfg):\n generic._configure(cfg, name, incs=('czmq.h',), libs=('czmq',), pcname=\n name.lower(), uses='LIBZMQ', mandatory=True)\n",
"<import token>\nname = __name__\n\n\ndef options(opt):\n generic._options(opt, name)\n\n\ndef configure(cfg):\n generic._configure(cfg, name, incs=('czmq.h',), libs=('czmq',), pcname=\n name.lower(), uses='LIBZMQ', mandatory=True)\n",
"<import token>\n<assignment token>\n\n\ndef options(opt):\n generic._options(opt, name)\n\n\ndef configure(cfg):\n generic._configure(cfg, name, incs=('czmq.h',), libs=('czmq',), pcname=\n name.lower(), uses='LIBZMQ', mandatory=True)\n",
"<import token>\n<assignment token>\n\n\ndef options(opt):\n generic._options(opt, name)\n\n\n<function token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n"
] | false |
9,768 |
69ebdab4cd1f0b5154305410381db252205ff97d
|
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
'''
@Description: 数据库迁移
@Author: Zpp
@Date: 2020-03-30 11:01:56
@LastEditors: Zpp
@LastEditTime: 2020-04-28 09:55:26
'''
import sys
import os
curPath = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(curPath)[0]
sys.path.append(rootPath)
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from conf.setting import Config
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = Config().get_sql_url()
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
from models.salary import *
from models.system import *
from models.log import *
# 初始化 migrate
# 两个参数一个是 Flask 的 app,一个是数据库 db
migrate = Migrate(app, db)
# 初始化管理器
manager = Manager(app)
# 添加 db 命令,并与 MigrateCommand 绑定
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
[
"#!/usr/bin/env python\n# -*- coding:UTF-8 -*-\n'''\n@Description: 数据库迁移\n@Author: Zpp\n@Date: 2020-03-30 11:01:56\n@LastEditors: Zpp\n@LastEditTime: 2020-04-28 09:55:26\n'''\nimport sys\nimport os\ncurPath = os.path.abspath(os.path.dirname(__file__))\nrootPath = os.path.split(curPath)[0]\nsys.path.append(rootPath)\n\nfrom flask import Flask\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_script import Manager\nfrom flask_migrate import Migrate, MigrateCommand\nfrom conf.setting import Config\n\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'] = Config().get_sql_url()\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n\ndb = SQLAlchemy(app)\n\nfrom models.salary import *\nfrom models.system import *\nfrom models.log import *\n\n# 初始化 migrate\n# 两个参数一个是 Flask 的 app,一个是数据库 db\nmigrate = Migrate(app, db)\n\n# 初始化管理器\nmanager = Manager(app)\n# 添加 db 命令,并与 MigrateCommand 绑定\nmanager.add_command('db', MigrateCommand)\n\n\nif __name__ == '__main__':\n manager.run()\n",
"<docstring token>\nimport sys\nimport os\ncurPath = os.path.abspath(os.path.dirname(__file__))\nrootPath = os.path.split(curPath)[0]\nsys.path.append(rootPath)\nfrom flask import Flask\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_script import Manager\nfrom flask_migrate import Migrate, MigrateCommand\nfrom conf.setting import Config\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'] = Config().get_sql_url()\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\ndb = SQLAlchemy(app)\nfrom models.salary import *\nfrom models.system import *\nfrom models.log import *\nmigrate = Migrate(app, db)\nmanager = Manager(app)\nmanager.add_command('db', MigrateCommand)\nif __name__ == '__main__':\n manager.run()\n",
"<docstring token>\n<import token>\ncurPath = os.path.abspath(os.path.dirname(__file__))\nrootPath = os.path.split(curPath)[0]\nsys.path.append(rootPath)\n<import token>\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'] = Config().get_sql_url()\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\ndb = SQLAlchemy(app)\n<import token>\nmigrate = Migrate(app, db)\nmanager = Manager(app)\nmanager.add_command('db', MigrateCommand)\nif __name__ == '__main__':\n manager.run()\n",
"<docstring token>\n<import token>\n<assignment token>\nsys.path.append(rootPath)\n<import token>\n<assignment token>\n<import token>\n<assignment token>\nmanager.add_command('db', MigrateCommand)\nif __name__ == '__main__':\n manager.run()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<import token>\n<assignment token>\n<code token>\n"
] | false |
9,769 |
bf04bf41f657a6ada4777fe5de98d6a68beda9d3
|
import scipy.sparse
from multiprocessing.sharedctypes import Array
from ctypes import c_double
import numpy as np
from multiprocessing import Pool
import matplotlib.pyplot as plt
from time import time
import scipy.io as sio
import sys
# np.random.seed(1)
d = 100
n = 100000
k=10
learning_rate = 0.4
T_freq = 100
num_threads = 1
epochs = 1
Iterations = 10
def getSyntheticData(n,d,k):
mean = np.array([0] * d)
alpha = 0.8
cov_diag = [alpha**i for i in range(d)]
covariance = np.diag(cov_diag)
truth = np.sum(cov_diag[:k])
samples = np.random.multivariate_normal(mean,covariance,n)
return [samples,covariance,truth]
def oja_async(sample):
# print rate_shared[0]
sample = sample.reshape(d,1)
U = np.frombuffer(coef_shared)
U = U.reshape(d,k)
grad = np.dot(sample,np.dot(sample.T,U))
rate_shared[0] = rate_shared[0]+1
U = U + (learning_rate/rate_shared[0])*grad
# U = U + (learning_rate/np.sqrt(rate_shared[0]))*grad
for i in range(d):
for j in range(k):
coef_shared[j+i*k] = U[i][j]
U= np.linalg.qr(U)[0]
if rate_shared[0]%T_freq ==0:
error = truth-np.trace(np.dot(np.dot(U.T,covariance),U))
return [error,time()]
# else:
# return None
def hogwild(samples,k,num_threads):
n = len(samples)
d = len(samples[0])
st = time()
# print num_threads
p = Pool(num_threads)
error_n_times = p.map(oja_async, samples)
error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t!= None]
# print error_n_times_refined;
errors = [ent[0] for ent in error_n_times_refined]
end_times = [ent[1] for ent in error_n_times_refined]
times = [et - st for et in end_times]
errors = [x for _,x in sorted(zip(times,errors))]
times = sorted(times)
n_t_freq = n/T_freq
return [errors[:n_t_freq],times[:n_t_freq]]
def evaluate(model):
data_train = data["train"]
# data_test = data["test"]
covariance_train = np.dot(data_train,data_train.T)/n
# covariance_test = np.dot(data_test,data_test.T)/n
truth_train = np.trace(covariance_train)
# truth_test = np.trace(covariance_test)
# error_train = np.linalg.norm(data_train - np.dot(np.dot(model,model.T),data_train),"fro")/n
# error_test = np.linalg.norm(data_test - np.dot(np.dot(model,model.T),data_test),"fro")/n
error_train = truth_train - np.trace(np.dot(np.dot(model.T,covariance_train),model))
# error_test = truth_test - np.trace(np.dot(np.dot(model.T,covariance_test),model))
# return error_train, error_test
return error_train, error_train
def ojaNormal(samples,k):
errors = []
elapsed_times = []
start_time = time()
U = np.random.randn(d,k)
# U = np.linalg.qr(U)[0]
t = 0
for x in samples:
t=t+1
x = x.reshape(d,1)
U = U + (np.dot(x,np.dot(x.T,U)))*learning_rate/t
if t%T_freq == 0:
U_proj= np.linalg.qr(U)[0]
# U = U_proj
error = truth- np.trace(np.dot(np.dot(U_proj.T,covariance),U_proj))
errors.append(error)
elapsed_times.append(time() - start_time)
U_final = np.linalg.qr(U)[0]
return [errors,elapsed_times]
def plotEverything(errors_oja, times_oja,errors_hogwild_one, times_hogwild_one,errors_hogwild_two, times_hogwild_two,errors_hogwild_four, times_hogwild_four):
plt.figure(0)
plt.xlabel('Time (secs)')
plt.ylabel('Error')
plt.plot(times_oja,errors_oja)
plt.plot(times_hogwild_one,errors_hogwild_one)
plt.plot(times_hogwild_two,errors_hogwild_two)
plt.plot(times_hogwild_four,errors_hogwild_four)
plt.legend(("oja","hogwild, 1 process","hogwild 2 processes","hogwild, 4 processes"))
# plt.legend(("oja","hogwild 2 processes","hogwild, 4 processes"))
plt.title("k = "+str(k))
iterations_oja = range(1,len(errors_oja)+1)
iterations_hogwild_one = range(1,len(errors_hogwild_one)+1)
iterations_hogwild_two = range(1,len(errors_hogwild_two)+1)
iterations_hogwild_four = range(1,len(errors_hogwild_four)+1)
plt.figure(1)
plt.xlabel('Iterations')
plt.ylabel('Error')
plt.plot(iterations_oja,errors_oja)
plt.plot(iterations_hogwild_one,errors_hogwild_one)
plt.plot(iterations_hogwild_two,errors_hogwild_two)
plt.plot(iterations_hogwild_four,errors_hogwild_four)
plt.legend(("oja","hogwild, 1 process","hogwild 2 processes","hogwild, 4 processes"))
# plt.legend(("oja","hogwild 2 processes","hogwild, 4 processes"))
plt.title("k = "+str(k))
plt.show()
[samples,covariance,truth] = getSyntheticData(n,d,k)
total_samples = []
for i in range(epochs):
total_samples.extend(samples)
errors_oja_sum = [0]*n
times_oja_sum = [0]*n
errors_hogwild_sum_one = [0]*n
times_hogwild_sum_one = [0]*n
errors_hogwild_sum_two = [0]*n
times_hogwild_sum_two = [0]*n
errors_hogwild_sum_four= [0]*n
times_hogwild_sum_four = [0]*n
for t in range(Iterations):
[errors_oja, times_oja] = ojaNormal(total_samples,k)
errors_oja_sum = [e_sum + e for (e_sum,e) in zip(errors_oja_sum,errors_oja)]
times_oja_sum = [t_sum + t for (t_sum,t) in zip(times_oja_sum,times_oja)]
coef_shared = Array(c_double,
(np.random.randn(d,k).flat),
lock=False)
rate_shared = Array(c_double,
[0],
lock=False)
[errors_hogwild_one, times_hogwild_one] = hogwild(total_samples,k,1)
coef_shared = Array(c_double,
(np.random.randn(d,k).flat),
lock=False)
rate_shared = Array(c_double,
[0],
lock=False)
[errors_hogwild_two, times_hogwild_two] = hogwild(total_samples,k,2)
coef_shared = Array(c_double,
(np.random.randn(d,k).flat),
lock=False)
rate_shared = Array(c_double,
[0],
lock=False)
[errors_hogwild_four, times_hogwild_four] = hogwild(total_samples,k,4)
errors_hogwild_sum_one = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_one,errors_hogwild_one)]
times_hogwild_sum_one = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_one,times_hogwild_one)]
errors_hogwild_sum_two = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_two,errors_hogwild_two)]
times_hogwild_sum_two = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_two,times_hogwild_two)]
errors_hogwild_sum_four = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_four,errors_hogwild_four)]
times_hogwild_sum_four = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_four,times_hogwild_four)]
errors_oja_average = [e/Iterations for e in errors_oja_sum]
times_oja_average = [t/Iterations for t in times_oja_sum]
times_hogwild_average_one = [t/Iterations for t in times_hogwild_sum_one]
errors_hogwild_average_one = [e/Iterations for e in errors_hogwild_sum_one]
times_hogwild_average_two = [t/Iterations for t in times_hogwild_sum_two]
errors_hogwild_average_two = [e/Iterations for e in errors_hogwild_sum_two]
times_hogwild_average_four = [t/Iterations for t in times_hogwild_sum_four]
errors_hogwild_average_four = [e/Iterations for e in errors_hogwild_sum_four]
plotEverything(errors_oja_average, times_oja_average,errors_hogwild_average_one, times_hogwild_average_one,errors_hogwild_average_two, times_hogwild_average_two,errors_hogwild_average_four, times_hogwild_average_four)
|
[
"import scipy.sparse\nfrom multiprocessing.sharedctypes import Array\nfrom ctypes import c_double\nimport numpy as np\nfrom multiprocessing import Pool\nimport matplotlib.pyplot as plt\nfrom time import time\nimport scipy.io as sio\nimport sys\n# np.random.seed(1)\n\n\n\nd = 100\nn = 100000\nk=10\nlearning_rate = 0.4\nT_freq = 100\nnum_threads = 1\nepochs = 1\nIterations = 10\n\ndef getSyntheticData(n,d,k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [alpha**i for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k]) \n samples = np.random.multivariate_normal(mean,covariance,n)\n return [samples,covariance,truth]\n\n\ndef oja_async(sample):\n # print rate_shared[0]\n sample = sample.reshape(d,1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d,k)\n grad = np.dot(sample,np.dot(sample.T,U))\n rate_shared[0] = rate_shared[0]+1\n U = U + (learning_rate/rate_shared[0])*grad\n # U = U + (learning_rate/np.sqrt(rate_shared[0]))*grad\n\n for i in range(d):\n for j in range(k):\n coef_shared[j+i*k] = U[i][j]\n\n U= np.linalg.qr(U)[0]\n if rate_shared[0]%T_freq ==0:\n error = truth-np.trace(np.dot(np.dot(U.T,covariance),U))\n return [error,time()]\n # else:\n # return None\n\ndef hogwild(samples,k,num_threads):\n n = len(samples)\n d = len(samples[0])\n\n st = time()\n # print num_threads\n p = Pool(num_threads) \n\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t!= None]\n # print error_n_times_refined;\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [et - st for et in end_times]\n errors = [x for _,x in sorted(zip(times,errors))]\n times = sorted(times)\n\n n_t_freq = n/T_freq\n return [errors[:n_t_freq],times[:n_t_freq]]\n \n\n\ndef evaluate(model):\n data_train = data[\"train\"]\n# data_test = data[\"test\"]\n covariance_train = np.dot(data_train,data_train.T)/n\n# covariance_test = 
np.dot(data_test,data_test.T)/n\n truth_train = np.trace(covariance_train)\n# truth_test = np.trace(covariance_test)\n# error_train = np.linalg.norm(data_train - np.dot(np.dot(model,model.T),data_train),\"fro\")/n\n# error_test = np.linalg.norm(data_test - np.dot(np.dot(model,model.T),data_test),\"fro\")/n\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,covariance_train),model))\n# error_test = truth_test - np.trace(np.dot(np.dot(model.T,covariance_test),model))\n# return error_train, error_test\n return error_train, error_train\n\ndef ojaNormal(samples,k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d,k)\n # U = np.linalg.qr(U)[0]\n\n t = 0\n for x in samples:\n t=t+1\n x = x.reshape(d,1)\n U = U + (np.dot(x,np.dot(x.T,U)))*learning_rate/t\n if t%T_freq == 0:\n U_proj= np.linalg.qr(U)[0]\n # U = U_proj\n error = truth- np.trace(np.dot(np.dot(U_proj.T,covariance),U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n\n U_final = np.linalg.qr(U)[0]\n return [errors,elapsed_times] \n\n\n\ndef plotEverything(errors_oja, times_oja,errors_hogwild_one, times_hogwild_one,errors_hogwild_two, times_hogwild_two,errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja,errors_oja)\n plt.plot(times_hogwild_one,errors_hogwild_one)\n plt.plot(times_hogwild_two,errors_hogwild_two)\n plt.plot(times_hogwild_four,errors_hogwild_four)\n plt.legend((\"oja\",\"hogwild, 1 process\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n # plt.legend((\"oja\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n plt.title(\"k = \"+str(k))\n\n iterations_oja = range(1,len(errors_oja)+1)\n iterations_hogwild_one = range(1,len(errors_hogwild_one)+1)\n iterations_hogwild_two = range(1,len(errors_hogwild_two)+1)\n iterations_hogwild_four = range(1,len(errors_hogwild_four)+1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n 
plt.plot(iterations_oja,errors_oja)\n plt.plot(iterations_hogwild_one,errors_hogwild_one)\n plt.plot(iterations_hogwild_two,errors_hogwild_two)\n plt.plot(iterations_hogwild_four,errors_hogwild_four)\n plt.legend((\"oja\",\"hogwild, 1 process\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n # plt.legend((\"oja\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n plt.title(\"k = \"+str(k))\n plt.show()\n\n\n[samples,covariance,truth] = getSyntheticData(n,d,k)\ntotal_samples = []\n\nfor i in range(epochs):\n total_samples.extend(samples)\n\nerrors_oja_sum = [0]*n\ntimes_oja_sum = [0]*n\n\nerrors_hogwild_sum_one = [0]*n\ntimes_hogwild_sum_one = [0]*n\n\n\nerrors_hogwild_sum_two = [0]*n\ntimes_hogwild_sum_two = [0]*n\n\nerrors_hogwild_sum_four= [0]*n\ntimes_hogwild_sum_four = [0]*n\n\n\nfor t in range(Iterations):\n [errors_oja, times_oja] = ojaNormal(total_samples,k)\n\n errors_oja_sum = [e_sum + e for (e_sum,e) in zip(errors_oja_sum,errors_oja)]\n times_oja_sum = [t_sum + t for (t_sum,t) in zip(times_oja_sum,times_oja)]\n\n coef_shared = Array(c_double, \n (np.random.randn(d,k).flat),\n lock=False) \n rate_shared = Array(c_double, \n [0],\n lock=False) \n [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples,k,1)\n\n coef_shared = Array(c_double, \n (np.random.randn(d,k).flat),\n lock=False) \n rate_shared = Array(c_double, \n [0],\n lock=False) \n [errors_hogwild_two, times_hogwild_two] = hogwild(total_samples,k,2)\n\n coef_shared = Array(c_double, \n (np.random.randn(d,k).flat),\n lock=False) \n rate_shared = Array(c_double, \n [0],\n lock=False) \n [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples,k,4)\n\n\n errors_hogwild_sum_one = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_one,errors_hogwild_one)]\n times_hogwild_sum_one = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_one,times_hogwild_one)]\n\n\n errors_hogwild_sum_two = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_two,errors_hogwild_two)]\n 
times_hogwild_sum_two = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_two,times_hogwild_two)]\n\n errors_hogwild_sum_four = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_four,errors_hogwild_four)]\n times_hogwild_sum_four = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_four,times_hogwild_four)]\n\nerrors_oja_average = [e/Iterations for e in errors_oja_sum]\ntimes_oja_average = [t/Iterations for t in times_oja_sum]\n\ntimes_hogwild_average_one = [t/Iterations for t in times_hogwild_sum_one]\nerrors_hogwild_average_one = [e/Iterations for e in errors_hogwild_sum_one]\n\ntimes_hogwild_average_two = [t/Iterations for t in times_hogwild_sum_two]\nerrors_hogwild_average_two = [e/Iterations for e in errors_hogwild_sum_two]\n\ntimes_hogwild_average_four = [t/Iterations for t in times_hogwild_sum_four]\nerrors_hogwild_average_four = [e/Iterations for e in errors_hogwild_sum_four]\nplotEverything(errors_oja_average, times_oja_average,errors_hogwild_average_one, times_hogwild_average_one,errors_hogwild_average_two, times_hogwild_average_two,errors_hogwild_average_four, times_hogwild_average_four)\n\n",
"import scipy.sparse\nfrom multiprocessing.sharedctypes import Array\nfrom ctypes import c_double\nimport numpy as np\nfrom multiprocessing import Pool\nimport matplotlib.pyplot as plt\nfrom time import time\nimport scipy.io as sio\nimport sys\nd = 100\nn = 100000\nk = 10\nlearning_rate = 0.4\nT_freq = 100\nnum_threads = 1\nepochs = 1\nIterations = 10\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\ndef hogwild(samples, k, num_threads):\n n = len(samples)\n d = len(samples[0])\n st = time()\n p = Pool(num_threads)\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t != None]\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [(et - st) for et in end_times]\n errors = [x for _, x in sorted(zip(times, errors))]\n times = sorted(times)\n n_t_freq = n / T_freq\n return [errors[:n_t_freq], times[:n_t_freq]]\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times 
= []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n[samples, covariance, truth] = getSyntheticData(n, d, k)\ntotal_samples = []\nfor i in range(epochs):\n total_samples.extend(samples)\nerrors_oja_sum = [0] * n\ntimes_oja_sum = [0] * n\nerrors_hogwild_sum_one = [0] * n\ntimes_hogwild_sum_one = [0] * n\nerrors_hogwild_sum_two = [0] * n\ntimes_hogwild_sum_two = [0] * n\nerrors_hogwild_sum_four = [0] * 
n\ntimes_hogwild_sum_four = [0] * n\nfor t in range(Iterations):\n [errors_oja, times_oja] = ojaNormal(total_samples, k)\n errors_oja_sum = [(e_sum + e) for e_sum, e in zip(errors_oja_sum,\n errors_oja)]\n times_oja_sum = [(t_sum + t) for t_sum, t in zip(times_oja_sum, times_oja)]\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples, k, 1)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_two, times_hogwild_two] = hogwild(total_samples, k, 2)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples, k, 4)\n errors_hogwild_sum_one = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_one, errors_hogwild_one)]\n times_hogwild_sum_one = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_one, times_hogwild_one)]\n errors_hogwild_sum_two = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_two, errors_hogwild_two)]\n times_hogwild_sum_two = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_two, times_hogwild_two)]\n errors_hogwild_sum_four = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_four, errors_hogwild_four)]\n times_hogwild_sum_four = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_four, times_hogwild_four)]\nerrors_oja_average = [(e / Iterations) for e in errors_oja_sum]\ntimes_oja_average = [(t / Iterations) for t in times_oja_sum]\ntimes_hogwild_average_one = [(t / Iterations) for t in times_hogwild_sum_one]\nerrors_hogwild_average_one = [(e / Iterations) for e in errors_hogwild_sum_one]\ntimes_hogwild_average_two = [(t / Iterations) for t in times_hogwild_sum_two]\nerrors_hogwild_average_two = [(e / Iterations) for e in errors_hogwild_sum_two]\ntimes_hogwild_average_four = [(t / 
Iterations) for t in times_hogwild_sum_four]\nerrors_hogwild_average_four = [(e / Iterations) for e in\n errors_hogwild_sum_four]\nplotEverything(errors_oja_average, times_oja_average,\n errors_hogwild_average_one, times_hogwild_average_one,\n errors_hogwild_average_two, times_hogwild_average_two,\n errors_hogwild_average_four, times_hogwild_average_four)\n",
"<import token>\nd = 100\nn = 100000\nk = 10\nlearning_rate = 0.4\nT_freq = 100\nnum_threads = 1\nepochs = 1\nIterations = 10\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\ndef hogwild(samples, k, num_threads):\n n = len(samples)\n d = len(samples[0])\n st = time()\n p = Pool(num_threads)\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t != None]\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [(et - st) for et in end_times]\n errors = [x for _, x in sorted(zip(times, errors))]\n times = sorted(times)\n n_t_freq = n / T_freq\n return [errors[:n_t_freq], times[:n_t_freq]]\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n 
error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n[samples, covariance, truth] = getSyntheticData(n, d, k)\ntotal_samples = []\nfor i in range(epochs):\n total_samples.extend(samples)\nerrors_oja_sum = [0] * n\ntimes_oja_sum = [0] * n\nerrors_hogwild_sum_one = [0] * n\ntimes_hogwild_sum_one = [0] * n\nerrors_hogwild_sum_two = [0] * n\ntimes_hogwild_sum_two = [0] * n\nerrors_hogwild_sum_four = [0] * n\ntimes_hogwild_sum_four = [0] * n\nfor t in range(Iterations):\n [errors_oja, times_oja] = ojaNormal(total_samples, k)\n errors_oja_sum = [(e_sum + e) for e_sum, e in zip(errors_oja_sum,\n errors_oja)]\n times_oja_sum = [(t_sum + t) for 
t_sum, t in zip(times_oja_sum, times_oja)]\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples, k, 1)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_two, times_hogwild_two] = hogwild(total_samples, k, 2)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples, k, 4)\n errors_hogwild_sum_one = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_one, errors_hogwild_one)]\n times_hogwild_sum_one = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_one, times_hogwild_one)]\n errors_hogwild_sum_two = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_two, errors_hogwild_two)]\n times_hogwild_sum_two = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_two, times_hogwild_two)]\n errors_hogwild_sum_four = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_four, errors_hogwild_four)]\n times_hogwild_sum_four = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_four, times_hogwild_four)]\nerrors_oja_average = [(e / Iterations) for e in errors_oja_sum]\ntimes_oja_average = [(t / Iterations) for t in times_oja_sum]\ntimes_hogwild_average_one = [(t / Iterations) for t in times_hogwild_sum_one]\nerrors_hogwild_average_one = [(e / Iterations) for e in errors_hogwild_sum_one]\ntimes_hogwild_average_two = [(t / Iterations) for t in times_hogwild_sum_two]\nerrors_hogwild_average_two = [(e / Iterations) for e in errors_hogwild_sum_two]\ntimes_hogwild_average_four = [(t / Iterations) for t in times_hogwild_sum_four]\nerrors_hogwild_average_four = [(e / Iterations) for e in\n errors_hogwild_sum_four]\nplotEverything(errors_oja_average, times_oja_average,\n errors_hogwild_average_one, times_hogwild_average_one,\n 
errors_hogwild_average_two, times_hogwild_average_two,\n errors_hogwild_average_four, times_hogwild_average_four)\n",
"<import token>\n<assignment token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\ndef hogwild(samples, k, num_threads):\n n = len(samples)\n d = len(samples[0])\n st = time()\n p = Pool(num_threads)\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t != None]\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [(et - st) for et in end_times]\n errors = [x for _, x in sorted(zip(times, errors))]\n times = sorted(times)\n n_t_freq = n / T_freq\n return [errors[:n_t_freq], times[:n_t_freq]]\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n 
errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n<assignment token>\nfor i in range(epochs):\n total_samples.extend(samples)\n<assignment token>\nfor t in range(Iterations):\n [errors_oja, times_oja] = ojaNormal(total_samples, k)\n errors_oja_sum = [(e_sum + e) for e_sum, e in zip(errors_oja_sum,\n errors_oja)]\n times_oja_sum = [(t_sum + t) for t_sum, t in zip(times_oja_sum, times_oja)]\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples, k, 1)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n 
[errors_hogwild_two, times_hogwild_two] = hogwild(total_samples, k, 2)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples, k, 4)\n errors_hogwild_sum_one = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_one, errors_hogwild_one)]\n times_hogwild_sum_one = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_one, times_hogwild_one)]\n errors_hogwild_sum_two = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_two, errors_hogwild_two)]\n times_hogwild_sum_two = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_two, times_hogwild_two)]\n errors_hogwild_sum_four = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_four, errors_hogwild_four)]\n times_hogwild_sum_four = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_four, times_hogwild_four)]\n<assignment token>\nplotEverything(errors_oja_average, times_oja_average,\n errors_hogwild_average_one, times_hogwild_average_one,\n errors_hogwild_average_two, times_hogwild_average_two,\n errors_hogwild_average_four, times_hogwild_average_four)\n",
"<import token>\n<assignment token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\ndef hogwild(samples, k, num_threads):\n n = len(samples)\n d = len(samples[0])\n st = time()\n p = Pool(num_threads)\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t != None]\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [(et - st) for et in end_times]\n errors = [x for _, x in sorted(zip(times, errors))]\n times = sorted(times)\n n_t_freq = n / T_freq\n return [errors[:n_t_freq], times[:n_t_freq]]\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n 
errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\n<function token>\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, 
errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\n<function token>\n<function token>\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n 
iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 
processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,770 |
ebc2acbcbab787b07c97b0a4ea8fbaeb9d8e30aa
|
# 30. Convert P English pounds into D dollars or C cents,
# using the exchange rate $2.80 = 1 pound.
# (Header previously lacked the leading '#', which made the file a syntax error.)
p = 2.80  # exchange rate: dollars per pound

x = int(input("Desea convertir sus libras a dolar(1) o a centavos(2)"))

conversion = None  # stays None when the user picks an unknown option
if x == 1:
    d = float(input("¿Cuantas libras desea convertir a dólar?\n"))
    # NOTE(review): at $2.80 per pound, pounds -> dollars is usually d * p;
    # d / p is kept as originally written — confirm the intended direction.
    conversion = d / p
elif x == 2:
    c = float(input("¿Cuantas libras desea convertir a centavos?\n"))
    conversion = c / 100

if conversion is None:
    # Previously an unknown option crashed with NameError on `conversion`.
    print("Opción no válida")
else:
    print("El resultado es:")
    print(float(conversion))
|
[
"30. Convertir P libras inglesas a D dólares y C centavos. Usar el tipo de cambio $2.80 = 1 libra\r\np=2.80\r\n\r\nx=int(input(\"Desea convertir sus libras a dolar(1) o a centavos(2)\"))\r\n\r\nif x == 1:\r\n d=float(input(\"¿Cuantas libras desea convertir a dólar?\\n\"))\r\n conversion = (d/p)\r\nif x == 2:\r\n c=float(input(\"¿Cuantas libras desea convertir a centavos?\\n\"))\r\n conversion = c/100\r\nprint(\"El resultado es:\")\r\nprint(float(conversion))\r\n"
] | true |
9,771 |
eafe89de10c4187057b0cc1e0e9772f03a576b0d
|
__version__ = "1.2.0"
import hashlib
from collections import Counter
from re import findall
from secrets import choice
from string import ascii_letters, ascii_lowercase, ascii_uppercase
from string import digits as all_digits
from string import punctuation
import requests
def check_password(password):
    """Look up *password* in the Have I Been Pwned breach corpus.

    Note:
        Uses the `Have I Been Pwned <https://haveibeenpwned.com/>`_
        Passwords range API (k-anonymity): only the first five hex
        characters of the SHA-1 digest are sent; neither the password
        nor its full hash ever leaves the device.

    Args:
        password (str): The password to check

    Returns:
        int: The number of times the password has been found (0 if never)
    """
    digest = hashlib.sha1(password.encode("utf-8")).hexdigest()
    prefix, remainder = digest[:5], digest[5:]

    response = requests.get(f"https://api.pwnedpasswords.com/range/{prefix}")

    # Each response line is "<35-hex-char suffix>:<count>"; the API
    # reports suffixes in uppercase, our digest is lowercase.
    for line in response.text.splitlines(False):
        suffix, count = line.split(":")
        if suffix.lower() == remainder:
            return int(count)
    return 0
class PasswordRequirements:
    """A set of requirements to check passwords against

    Keyword Args:
        min_length (int): The minimum length of the password
        min_digits (int): The minimum number of digits in the password
        min_special (int): The minimum number of special characters in the password
        min_alpha (int): The minimum number of alphabetical characters in the password
        min_upper (int): The minimum number of uppercase letters in the password
        min_lower (int): The minimum number of lowercase letters in the password
        check_breaches (bool): Whether to ensure that passwords aren't found in known data breaches (uses :meth:`~passwd.check_password`)
        func (function): A function that takes in a password (:class:`str`) and returns a :class:`bool` that must be ``True`` for the password to meet all requirements
    """

    def __init__(
        self,
        *,
        min_length=0,
        min_digits=0,
        min_special=0,
        min_alpha=0,
        min_upper=0,
        min_lower=0,
        check_breaches=False,
        func=None,
    ):
        self.min_length = min_length
        self.min_digits = min_digits
        self.min_special = min_special
        self.min_alpha = min_alpha
        self.min_upper = min_upper
        self.min_lower = min_lower
        self.check_breaches = check_breaches
        self.func = func

    def check(self, password):
        """Check a password against the requirements

        Args:
            password (str): The password to check

        Returns:
            bool: Whether the password meets all the given requirements
        """
        if len(password) < self.min_length:
            return False

        if len(findall(r"\d", password)) < self.min_digits:
            return False

        # Tally each remaining character class from a single frequency
        # table instead of re-counting the password per class.
        occurrences = Counter(password)
        class_minimums = (
            (punctuation, self.min_special),
            (ascii_letters, self.min_alpha),
            (ascii_uppercase, self.min_upper),
            (ascii_lowercase, self.min_lower),
        )
        for charset, minimum in class_minimums:
            total = sum(n for ch, n in occurrences.items() if ch in charset)
            if total < minimum:
                return False

        # Breach lookup last: it is the only check with network cost.
        if self.check_breaches and check_password(password):
            return False

        if self.func and not self.func(password):
            return False

        return True
class PasswordGenerator:
    """A random password generator

    Args:
        length (int): The length of the password

    Keyword Args:
        uppercase (bool): Whether to allow uppercase letters in the password
        lowercase (bool): Whether to allow lowercase letters in the password
        digits (bool): Whether to allow numerical digits in the password
        special (bool): Whether to allow special characters in the password
    """

    def __init__(
        self, length, *, uppercase=True, lowercase=True, digits=True, special=True
    ):
        self.length = length
        self.uppercase = uppercase
        self.lowercase = lowercase
        self.digits = digits
        self.special = special

    def generate(
        self, length=None, uppercase=None, lowercase=None, digits=None, special=None
    ):
        """Generate a random password

        Keyword Args:
            length (int): The length of the password
            uppercase (bool): Whether to allow uppercase letters in the password
            lowercase (bool): Whether to allow lowercase letters in the password
            digits (bool): Whether to allow numerical digits in the password
            special (bool): Whether to allow special characters in the password

        Returns:
            str: The freshly generated password
        """
        size = self.length if length is None else length

        # Per-call keyword overrides the instance default when not None.
        toggles = (
            (uppercase, self.uppercase, ascii_uppercase),
            (lowercase, self.lowercase, ascii_lowercase),
            (digits, self.digits, all_digits),
            (special, self.special, punctuation),
        )
        pool = ""
        for override, default, charset in toggles:
            enabled = default if override is None else override
            if enabled:
                pool += charset

        return "".join(choice(pool) for _ in range(size))

    def __len__(self):
        if self.length < 0:
            return 0
        return self.length
|
[
"__version__ = \"1.2.0\"\n\nimport hashlib\nfrom collections import Counter\nfrom re import findall\nfrom secrets import choice\nfrom string import ascii_letters, ascii_lowercase, ascii_uppercase\nfrom string import digits as all_digits\nfrom string import punctuation\n\nimport requests\n\n\ndef check_password(password):\n \"\"\"Check a given password against known data breaches\n\n Note:\n This method uses the `Have I Been Pwned <https://haveibeenpwned.com/>`_ Passwords API. The unhashed password nor its full `SHA-1 <https://en.wikipedia.org/wiki/SHA-1>`_ hash never leave the device.\n\n Args:\n password (str): The password to check\n\n Returns:\n int: The number of times the password has been found\n \"\"\"\n\n sha1 = hashlib.sha1(password.encode(\"utf-8\")).hexdigest()\n\n response = requests.get(f\"https://api.pwnedpasswords.com/range/{sha1[:5]}\")\n\n hash_suffix_list = [x.split(\":\") for x in response.text.splitlines(False)]\n\n try:\n count = [\n count for suffix, count in hash_suffix_list if sha1.endswith(suffix.lower())\n ][0]\n except IndexError:\n return 0\n\n return int(count)\n\n\nclass PasswordRequirements:\n \"\"\"A set of requirements to check passwords against\n\n Keyword Args:\n min_length (int): The minimum length of the password\n min_digits (int): The minimum number of digits in the password\n min_special (int): The minimum number of special characters in the password\n min_alpha (int): The minimum number of alphabetical characters in the password\n min_upper (int): The minimum number of uppercase letters in the password\n min_lower (int): The minimum number of lowercase letters in the password\n check_breaches (bool): Whether to ensure that passwords aren't found in known data breaches (uses :meth:`~passwd.check_password`)\n func (function): A function that takes in a password (:class:`str`) and returns a :class:`bool` that must be ``True`` for the password to meet all requirements\n \"\"\"\n\n def __init__(\n self,\n *,\n min_length=0,\n 
min_digits=0,\n min_special=0,\n min_alpha=0,\n min_upper=0,\n min_lower=0,\n check_breaches=False,\n func=None,\n ):\n self.min_length = min_length\n self.min_digits = min_digits\n self.min_special = min_special\n self.min_alpha = min_alpha\n self.min_upper = min_upper\n self.min_lower = min_lower\n self.check_breaches = check_breaches\n self.func = func\n\n def check(self, password):\n \"\"\"Check a password against the requirements\n\n Args:\n password (str): The password to check\n\n Returns:\n bool: Whether the password meets all the given requirements\n \"\"\"\n\n if len(password) < self.min_length:\n return False\n\n digits = len(findall(r\"\\d\", password))\n if digits < self.min_digits:\n return False\n\n special_chars = sum(v for k, v in Counter(password).items() if k in punctuation)\n if special_chars < self.min_special:\n return False\n\n alpha_chars = sum(v for k, v in Counter(password).items() if k in ascii_letters)\n if alpha_chars < self.min_alpha:\n return False\n\n upper_chars = sum(\n v for k, v in Counter(password).items() if k in ascii_uppercase\n )\n if upper_chars < self.min_upper:\n return False\n\n lower_chars = sum(\n v for k, v in Counter(password).items() if k in ascii_lowercase\n )\n if lower_chars < self.min_lower:\n return False\n\n if self.check_breaches and check_password(password):\n return False\n\n if self.func and not self.func(password):\n return False\n\n return True\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(\n self, length, *, uppercase=True, lowercase=True, digits=True, special=True\n ):\n self.length = length\n self.uppercase = 
uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(\n self, length=None, uppercase=None, lowercase=None, digits=None, special=None\n ):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n \n allowed_chars = \"\"\n\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else \"\"\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else \"\"\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n\n if digits is not None:\n allowed_chars += all_digits if digits else \"\"\n elif self.digits:\n allowed_chars += all_digits\n\n if special is not None:\n allowed_chars += punctuation if special else \"\"\n elif self.special:\n allowed_chars += punctuation\n\n return \"\".join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"__version__ = '1.2.0'\nimport hashlib\nfrom collections import Counter\nfrom re import findall\nfrom secrets import choice\nfrom string import ascii_letters, ascii_lowercase, ascii_uppercase\nfrom string import digits as all_digits\nfrom string import punctuation\nimport requests\n\n\ndef check_password(password):\n \"\"\"Check a given password against known data breaches\n\n Note:\n This method uses the `Have I Been Pwned <https://haveibeenpwned.com/>`_ Passwords API. The unhashed password nor its full `SHA-1 <https://en.wikipedia.org/wiki/SHA-1>`_ hash never leave the device.\n\n Args:\n password (str): The password to check\n\n Returns:\n int: The number of times the password has been found\n \"\"\"\n sha1 = hashlib.sha1(password.encode('utf-8')).hexdigest()\n response = requests.get(f'https://api.pwnedpasswords.com/range/{sha1[:5]}')\n hash_suffix_list = [x.split(':') for x in response.text.splitlines(False)]\n try:\n count = [count for suffix, count in hash_suffix_list if sha1.\n endswith(suffix.lower())][0]\n except IndexError:\n return 0\n return int(count)\n\n\nclass PasswordRequirements:\n \"\"\"A set of requirements to check passwords against\n\n Keyword Args:\n min_length (int): The minimum length of the password\n min_digits (int): The minimum number of digits in the password\n min_special (int): The minimum number of special characters in the password\n min_alpha (int): The minimum number of alphabetical characters in the password\n min_upper (int): The minimum number of uppercase letters in the password\n min_lower (int): The minimum number of lowercase letters in the password\n check_breaches (bool): Whether to ensure that passwords aren't found in known data breaches (uses :meth:`~passwd.check_password`)\n func (function): A function that takes in a password (:class:`str`) and returns a :class:`bool` that must be ``True`` for the password to meet all requirements\n \"\"\"\n\n def __init__(self, *, min_length=0, min_digits=0, min_special=0,\n 
min_alpha=0, min_upper=0, min_lower=0, check_breaches=False, func=None\n ):\n self.min_length = min_length\n self.min_digits = min_digits\n self.min_special = min_special\n self.min_alpha = min_alpha\n self.min_upper = min_upper\n self.min_lower = min_lower\n self.check_breaches = check_breaches\n self.func = func\n\n def check(self, password):\n \"\"\"Check a password against the requirements\n\n Args:\n password (str): The password to check\n\n Returns:\n bool: Whether the password meets all the given requirements\n \"\"\"\n if len(password) < self.min_length:\n return False\n digits = len(findall('\\\\d', password))\n if digits < self.min_digits:\n return False\n special_chars = sum(v for k, v in Counter(password).items() if k in\n punctuation)\n if special_chars < self.min_special:\n return False\n alpha_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_letters)\n if alpha_chars < self.min_alpha:\n return False\n upper_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_uppercase)\n if upper_chars < self.min_upper:\n return False\n lower_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_lowercase)\n if lower_chars < self.min_lower:\n return False\n if self.check_breaches and check_password(password):\n return False\n if self.func and not self.func(password):\n return False\n return True\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n 
self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"__version__ = '1.2.0'\n<import token>\n\n\ndef check_password(password):\n \"\"\"Check a given password against known data breaches\n\n Note:\n This method uses the `Have I Been Pwned <https://haveibeenpwned.com/>`_ Passwords API. The unhashed password nor its full `SHA-1 <https://en.wikipedia.org/wiki/SHA-1>`_ hash never leave the device.\n\n Args:\n password (str): The password to check\n\n Returns:\n int: The number of times the password has been found\n \"\"\"\n sha1 = hashlib.sha1(password.encode('utf-8')).hexdigest()\n response = requests.get(f'https://api.pwnedpasswords.com/range/{sha1[:5]}')\n hash_suffix_list = [x.split(':') for x in response.text.splitlines(False)]\n try:\n count = [count for suffix, count in hash_suffix_list if sha1.\n endswith(suffix.lower())][0]\n except IndexError:\n return 0\n return int(count)\n\n\nclass PasswordRequirements:\n \"\"\"A set of requirements to check passwords against\n\n Keyword Args:\n min_length (int): The minimum length of the password\n min_digits (int): The minimum number of digits in the password\n min_special (int): The minimum number of special characters in the password\n min_alpha (int): The minimum number of alphabetical characters in the password\n min_upper (int): The minimum number of uppercase letters in the password\n min_lower (int): The minimum number of lowercase letters in the password\n check_breaches (bool): Whether to ensure that passwords aren't found in known data breaches (uses :meth:`~passwd.check_password`)\n func (function): A function that takes in a password (:class:`str`) and returns a :class:`bool` that must be ``True`` for the password to meet all requirements\n \"\"\"\n\n def __init__(self, *, min_length=0, min_digits=0, min_special=0,\n min_alpha=0, min_upper=0, min_lower=0, check_breaches=False, func=None\n ):\n self.min_length = min_length\n self.min_digits = min_digits\n self.min_special = min_special\n self.min_alpha = min_alpha\n self.min_upper = min_upper\n self.min_lower = 
min_lower\n self.check_breaches = check_breaches\n self.func = func\n\n def check(self, password):\n \"\"\"Check a password against the requirements\n\n Args:\n password (str): The password to check\n\n Returns:\n bool: Whether the password meets all the given requirements\n \"\"\"\n if len(password) < self.min_length:\n return False\n digits = len(findall('\\\\d', password))\n if digits < self.min_digits:\n return False\n special_chars = sum(v for k, v in Counter(password).items() if k in\n punctuation)\n if special_chars < self.min_special:\n return False\n alpha_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_letters)\n if alpha_chars < self.min_alpha:\n return False\n upper_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_uppercase)\n if upper_chars < self.min_upper:\n return False\n lower_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_lowercase)\n if lower_chars < self.min_lower:\n return False\n if self.check_breaches and check_password(password):\n return False\n if self.func and not self.func(password):\n return False\n return True\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow 
uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n\n\ndef check_password(password):\n \"\"\"Check a given password against known data breaches\n\n Note:\n This method uses the `Have I Been Pwned <https://haveibeenpwned.com/>`_ Passwords API. The unhashed password nor its full `SHA-1 <https://en.wikipedia.org/wiki/SHA-1>`_ hash never leave the device.\n\n Args:\n password (str): The password to check\n\n Returns:\n int: The number of times the password has been found\n \"\"\"\n sha1 = hashlib.sha1(password.encode('utf-8')).hexdigest()\n response = requests.get(f'https://api.pwnedpasswords.com/range/{sha1[:5]}')\n hash_suffix_list = [x.split(':') for x in response.text.splitlines(False)]\n try:\n count = [count for suffix, count in hash_suffix_list if sha1.\n endswith(suffix.lower())][0]\n except IndexError:\n return 0\n return int(count)\n\n\nclass PasswordRequirements:\n \"\"\"A set of requirements to check passwords against\n\n Keyword Args:\n min_length (int): The minimum length of the password\n min_digits (int): The minimum number of digits in the password\n min_special (int): The minimum number of special characters in the password\n min_alpha (int): The minimum number of alphabetical characters in the password\n min_upper (int): The minimum number of uppercase letters in the password\n min_lower (int): The minimum number of lowercase letters in the password\n check_breaches (bool): Whether to ensure that passwords aren't found in known data breaches (uses :meth:`~passwd.check_password`)\n func (function): A function that takes in a password (:class:`str`) and returns a :class:`bool` that must be ``True`` for the password to meet all requirements\n \"\"\"\n\n def __init__(self, *, min_length=0, min_digits=0, min_special=0,\n min_alpha=0, min_upper=0, min_lower=0, check_breaches=False, func=None\n ):\n self.min_length = min_length\n self.min_digits = min_digits\n self.min_special = min_special\n self.min_alpha = min_alpha\n self.min_upper = min_upper\n self.min_lower = 
min_lower\n self.check_breaches = check_breaches\n self.func = func\n\n def check(self, password):\n \"\"\"Check a password against the requirements\n\n Args:\n password (str): The password to check\n\n Returns:\n bool: Whether the password meets all the given requirements\n \"\"\"\n if len(password) < self.min_length:\n return False\n digits = len(findall('\\\\d', password))\n if digits < self.min_digits:\n return False\n special_chars = sum(v for k, v in Counter(password).items() if k in\n punctuation)\n if special_chars < self.min_special:\n return False\n alpha_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_letters)\n if alpha_chars < self.min_alpha:\n return False\n upper_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_uppercase)\n if upper_chars < self.min_upper:\n return False\n lower_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_lowercase)\n if lower_chars < self.min_lower:\n return False\n if self.check_breaches and check_password(password):\n return False\n if self.func and not self.func(password):\n return False\n return True\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow 
uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n\n\nclass PasswordRequirements:\n \"\"\"A set of requirements to check passwords against\n\n Keyword Args:\n min_length (int): The minimum length of the password\n min_digits (int): The minimum number of digits in the password\n min_special (int): The minimum number of special characters in the password\n min_alpha (int): The minimum number of alphabetical characters in the password\n min_upper (int): The minimum number of uppercase letters in the password\n min_lower (int): The minimum number of lowercase letters in the password\n check_breaches (bool): Whether to ensure that passwords aren't found in known data breaches (uses :meth:`~passwd.check_password`)\n func (function): A function that takes in a password (:class:`str`) and returns a :class:`bool` that must be ``True`` for the password to meet all requirements\n \"\"\"\n\n def __init__(self, *, min_length=0, min_digits=0, min_special=0,\n min_alpha=0, min_upper=0, min_lower=0, check_breaches=False, func=None\n ):\n self.min_length = min_length\n self.min_digits = min_digits\n self.min_special = min_special\n self.min_alpha = min_alpha\n self.min_upper = min_upper\n self.min_lower = min_lower\n self.check_breaches = check_breaches\n self.func = func\n\n def check(self, password):\n \"\"\"Check a password against the requirements\n\n Args:\n password (str): The password to check\n\n Returns:\n bool: Whether the password meets all the given requirements\n \"\"\"\n if len(password) < self.min_length:\n return False\n digits = len(findall('\\\\d', password))\n if digits < self.min_digits:\n return False\n special_chars = sum(v for k, v in Counter(password).items() if k in\n punctuation)\n if special_chars < self.min_special:\n return False\n alpha_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_letters)\n if alpha_chars < self.min_alpha:\n return False\n upper_chars = sum(v for k, v in Counter(password).items() if k in\n 
ascii_uppercase)\n if upper_chars < self.min_upper:\n return False\n lower_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_lowercase)\n if lower_chars < self.min_lower:\n return False\n if self.check_breaches and check_password(password):\n return False\n if self.func and not self.func(password):\n return False\n return True\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += 
all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n\n\nclass PasswordRequirements:\n <docstring token>\n\n def __init__(self, *, min_length=0, min_digits=0, min_special=0,\n min_alpha=0, min_upper=0, min_lower=0, check_breaches=False, func=None\n ):\n self.min_length = min_length\n self.min_digits = min_digits\n self.min_special = min_special\n self.min_alpha = min_alpha\n self.min_upper = min_upper\n self.min_lower = min_lower\n self.check_breaches = check_breaches\n self.func = func\n\n def check(self, password):\n \"\"\"Check a password against the requirements\n\n Args:\n password (str): The password to check\n\n Returns:\n bool: Whether the password meets all the given requirements\n \"\"\"\n if len(password) < self.min_length:\n return False\n digits = len(findall('\\\\d', password))\n if digits < self.min_digits:\n return False\n special_chars = sum(v for k, v in Counter(password).items() if k in\n punctuation)\n if special_chars < self.min_special:\n return False\n alpha_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_letters)\n if alpha_chars < self.min_alpha:\n return False\n upper_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_uppercase)\n if upper_chars < self.min_upper:\n return False\n lower_chars = sum(v for k, v in Counter(password).items() if k in\n ascii_lowercase)\n if lower_chars < self.min_lower:\n return False\n if self.check_breaches and check_password(password):\n return False\n if self.func and not self.func(password):\n return False\n return True\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, 
*, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n\n\nclass PasswordRequirements:\n <docstring token>\n\n def __init__(self, *, min_length=0, min_digits=0, min_special=0,\n min_alpha=0, min_upper=0, min_lower=0, check_breaches=False, func=None\n ):\n self.min_length = min_length\n self.min_digits = min_digits\n self.min_special = min_special\n self.min_alpha = min_alpha\n self.min_upper = min_upper\n self.min_lower = min_lower\n self.check_breaches = check_breaches\n self.func = func\n <function token>\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif 
self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n\n\nclass PasswordRequirements:\n <docstring token>\n <function token>\n <function token>\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n 
def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n<class token>\n\n\nclass PasswordGenerator:\n \"\"\"A random password generator\n\n Args:\n length (int): The length of the password\n\n Keyword Args:\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n \"\"\"\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n<class token>\n\n\nclass PasswordGenerator:\n <docstring token>\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n\n def generate(self, length=None, uppercase=None, lowercase=None, digits=\n None, special=None):\n \"\"\"Generate a random password\n\n Keyword Args:\n length (int): The length of the password\n uppercase (bool): Whether to allow uppercase letters in the password\n lowercase (bool): Whether to allow lowercase letters in the password\n digits (bool): Whether to allow numerical digits in the password\n special (bool): Whether to allow special characters in the password\n\n Returns:\n str: The freshly generated password\n \"\"\"\n if length is None:\n length = self.length\n allowed_chars = ''\n if uppercase is not None:\n allowed_chars += ascii_uppercase if uppercase else ''\n elif self.uppercase:\n allowed_chars += ascii_uppercase\n if lowercase is not None:\n allowed_chars += ascii_lowercase if lowercase else ''\n elif self.lowercase:\n allowed_chars += ascii_lowercase\n if digits is not None:\n allowed_chars += all_digits if digits else ''\n elif self.digits:\n allowed_chars += all_digits\n if special is not None:\n allowed_chars += punctuation if special else ''\n elif self.special:\n allowed_chars += punctuation\n return ''.join(choice(allowed_chars) for _ in range(length))\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n<class token>\n\n\nclass PasswordGenerator:\n <docstring token>\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n <function token>\n\n def __len__(self):\n return self.length if self.length >= 0 else 0\n",
"<assignment token>\n<import token>\n<function token>\n<class token>\n\n\nclass PasswordGenerator:\n <docstring token>\n\n def __init__(self, length, *, uppercase=True, lowercase=True, digits=\n True, special=True):\n self.length = length\n self.uppercase = uppercase\n self.lowercase = lowercase\n self.digits = digits\n self.special = special\n <function token>\n <function token>\n",
"<assignment token>\n<import token>\n<function token>\n<class token>\n\n\nclass PasswordGenerator:\n <docstring token>\n <function token>\n <function token>\n <function token>\n",
"<assignment token>\n<import token>\n<function token>\n<class token>\n<class token>\n"
] | false |
9,772 |
e7b96c0161e65f3f22f2ad0832fc6d1bb529f150
|
"""
In search.py, you will implement generic search algorithms which are called
by Pacman agents (in searchAgents.py).
"""
import util
class SearchProblem:
    """
    Abstract interface for a search problem.

    Concrete problems (built in searchAgents.py) implement every method
    below; this base class deliberately implements none of them, and you
    never need to modify it.
    """

    def getStartState(self):
        """Return the initial state of the search problem."""
        util.raiseNotDefined()

    def isGoalState(self, state):
        """Return True exactly when ``state`` is a valid goal state."""
        util.raiseNotDefined()

    def getSuccessors(self, state):
        """
        Return a list of (successor, action, stepCost) triples for ``state``,
        where ``successor`` is a state reachable in one move, ``action`` is
        the move that reaches it, and ``stepCost`` is the incremental cost
        of taking that move.
        """
        util.raiseNotDefined()

    def getCostOfActions(self, actions):
        """
        Return the total cost of executing ``actions``, a sequence that must
        consist entirely of legal moves.
        """
        util.raiseNotDefined()
def tinyMazeSearch(problem):
    """
    Return the hard-coded move sequence that solves the tinyMaze layout.

    The moves are fixed, so on any other maze the result is wrong —
    use this only for tinyMaze.
    """
    from game import Directions
    south = Directions.SOUTH
    west = Directions.WEST
    return [south, south, west, south, west, west, south, west]
def depthFirstSearch(problem):
    """
    Search the deepest nodes in the search tree first (graph search).

    Args:
        problem: A SearchProblem instance.

    Returns:
        A list of actions that reaches a goal state, or [] when the
        frontier is exhausted without finding one.
    """
    # LIFO frontier yields depth-first expansion order.
    frontier = util.Stack()
    # States already expanded; checking this makes it a graph search.
    visitedStates = []
    # Each frontier entry: (state, path of actions that reached it)
    frontier.push((problem.getStartState(), []))

    while not frontier.isEmpty():
        currentState, pathTaken = frontier.pop()

        # A state may sit on the frontier several times; expand it once.
        if currentState in visitedStates:
            continue

        if problem.isGoalState(currentState):
            return pathTaken

        visitedStates.append(currentState)

        # Queue each unvisited successor with its extended action path.
        for coordinates, direction, cost in problem.getSuccessors(currentState):
            if coordinates not in visitedStates:
                frontier.push((coordinates, pathTaken + [direction]))

    # No goal reachable. The original fell through to the template stub
    # util.raiseNotDefined(), crashing on unsolvable problems; an empty
    # plan is the conventional failure result.
    return []
def breadthFirstSearch(problem):
    """
    Search the shallowest nodes in the search tree first (graph search).

    Identical to depthFirstSearch except that the frontier is FIFO.

    Args:
        problem: A SearchProblem instance.

    Returns:
        A list of actions that reaches a goal state, or [] when the
        frontier is exhausted without finding one.
    """
    # FIFO frontier yields breadth-first expansion order.
    frontier = util.Queue()
    # States already expanded; checking this makes it a graph search.
    visitedStates = []
    # Each frontier entry: (state, path of actions that reached it)
    frontier.push((problem.getStartState(), []))

    while not frontier.isEmpty():
        currentState, pathTaken = frontier.pop()

        # A state may sit on the frontier several times; expand it once.
        if currentState in visitedStates:
            continue

        if problem.isGoalState(currentState):
            return pathTaken

        visitedStates.append(currentState)

        # Queue each unvisited successor with its extended action path.
        for coordinates, direction, cost in problem.getSuccessors(currentState):
            if coordinates not in visitedStates:
                frontier.push((coordinates, pathTaken + [direction]))

    # No goal reachable. The original fell through to the template stub
    # util.raiseNotDefined(), crashing on unsolvable problems; an empty
    # plan is the conventional failure result.
    return []
def uniformCostSearch(problem):
    """
    Search the node of least total path cost first (Dijkstra-style).

    Args:
        problem: A SearchProblem instance.

    Returns:
        A cheapest list of actions that reaches a goal state, or [] when
        the frontier is exhausted without finding one.
    """
    frontier = util.PriorityQueue()
    # States already expanded; checking this makes it a graph search.
    visitedStates = []
    # Each frontier entry: (state, path taken, cumulative cost g);
    # the queue priority is g.
    frontier.push((problem.getStartState(), [], 0), 0)

    while not frontier.isEmpty():
        currentState, pathTaken, costSoFar = frontier.pop()

        # A state may sit on the frontier several times; expand only the
        # first (cheapest) occurrence.
        if currentState in visitedStates:
            continue

        if problem.isGoalState(currentState):
            return pathTaken

        visitedStates.append(currentState)

        for coordinates, direction, stepCost in problem.getSuccessors(currentState):
            if coordinates not in visitedStates:
                # Accumulate the successor's step cost instead of
                # recomputing getCostOfActions over the whole path
                # (O(1) per push instead of O(len(path))).
                newCost = costSoFar + stepCost
                frontier.push((coordinates, pathTaken + [direction], newCost),
                              newCost)

    # No goal reachable. The original fell through to the template stub
    # util.raiseNotDefined(); return an empty plan instead.
    return []
def nullHeuristic(state, problem=None):
"""
A heuristic function estimates the cost from the current state to the nearest
goal in the provided SearchProblem. This heuristic is trivial.
"""
return 0
def aStarSearch(problem, heuristic=nullHeuristic):
    """
    Search the node with the lowest combined cost g(n) + heuristic h(n) first.

    Args:
        problem: A SearchProblem instance.
        heuristic: Function (state, problem) -> estimated cost to goal;
            defaults to nullHeuristic, which reduces A* to UCS.

    Returns:
        A list of actions that reaches a goal state, or [] when the
        frontier is exhausted without finding one.
    """
    frontier = util.PriorityQueue()
    # States already expanded; checking this makes it a graph search.
    visitedStates = []
    startState = problem.getStartState()
    # Each frontier entry: (state, path taken, cumulative cost g);
    # the queue priority is f = g + h.
    frontier.push((startState, [], 0), heuristic(startState, problem))

    while not frontier.isEmpty():
        currentState, pathTaken, costSoFar = frontier.pop()

        # A state may sit on the frontier several times; expand only the
        # first (lowest-f) occurrence.
        if currentState in visitedStates:
            continue

        if problem.isGoalState(currentState):
            return pathTaken

        visitedStates.append(currentState)

        for coordinates, direction, stepCost in problem.getSuccessors(currentState):
            if coordinates not in visitedStates:
                # Accumulate the successor's step cost instead of
                # recomputing getCostOfActions over the whole path;
                # unlike UCS, the priority adds the heuristic estimate.
                newCost = costSoFar + stepCost
                frontier.push((coordinates, pathTaken + [direction], newCost),
                              newCost + heuristic(coordinates, problem))

    # No goal reachable. The original fell through to the template stub
    # util.raiseNotDefined(); return an empty plan instead.
    return []
# Abbreviations
# Shorter module-level aliases for the search functions defined above —
# presumably referenced by name from the framework's command line; verify
# against searchAgents.py before renaming.
bfs = breadthFirstSearch
dfs = depthFirstSearch
astar = aStarSearch
ucs = uniformCostSearch
|
[
"\"\"\"\nIn search.py, you will implement generic search algorithms which are called\nby Pacman agents (in searchAgents.py).\n\"\"\"\n\nimport util\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\ndef tinyMazeSearch(problem):\n \"\"\"\n Returns a sequence of moves that solves tinyMaze. For any other\n maze, the sequence of moves will be incorrect, so only use this for tinyMaze\n \"\"\"\n from game import Directions\n s = Directions.SOUTH\n w = Directions.WEST\n return [s,s,w,s,w,w,s,w]\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"*** YOUR CODE HERE ***\"\n\n # Frontier stored in a Stack\n frontier = util.Stack()\n\n # Visited states stored in a list\n visitedStates = []\n\n # Format of each element: (current coordinates, [path taken to get there]) \n frontier.push((problem.getStartState(), []))\n\n # while there are still states to explore\n while not frontier.isEmpty():\n \n # store the current state and path in separate variables\n currentState, pathTaken = frontier.pop()\n\n # for skipping states that have already been visited\n if currentState in visitedStates:\n continue\n\n # for returning the correct path to the goal state upon discovering it\n if problem.isGoalState(currentState):\n return pathTaken\n\n # count the current state as \"visited\"\n visitedStates.append(currentState)\n\n # for each successor state, check whether they have already been visited. 
if not, add their coordinates to the frontier, and append their respective direction to the path list\n for coordinates, direction, cost in problem.getSuccessors(currentState):\n\n if coordinates not in visitedStates:\n \n frontier.push((coordinates, pathTaken + [direction]))\n\n\n util.raiseNotDefined()\n\ndef breadthFirstSearch(problem):\n \"\"\"\n Search the shallowest nodes in the search tree first.\n \"\"\"\n \"*** YOUR CODE HERE ***\"\n\n # BFS is identical to DFS, save for the data structure used to store the frontier\n\n # Frontier stored in a Queue\n frontier = util.Queue()\n\n # Visited states stored in a list\n visitedStates = []\n\n # Format of each element: (current coordinates, [path taken to get there])\n frontier.push((problem.getStartState(), []))\n\n # while there are still states to explore\n while not frontier.isEmpty():\n\n # store the current state and path in separate variables\n currentState, pathTaken = frontier.pop()\n\n # for skipping states that have already been visited\n if currentState in visitedStates:\n continue\n\n # for returning the correct path to the goal state upon discovering it\n if problem.isGoalState(currentState):\n return pathTaken\n\n # count the current state as \"visited\"\n visitedStates.append(currentState)\n\n # for each successor state, check whether they have already been visited. if not, add their coordinates to the frontier, and append their respective direction to the path list\n for coordinates, direction, cost in problem.getSuccessors(currentState):\n\n if coordinates not in visitedStates:\n\n frontier.push((coordinates, pathTaken + [direction]))\n\n util.raiseNotDefined()\n\ndef uniformCostSearch(problem):\n \"Search the node of least total cost first. 
\"\n \"*** YOUR CODE HERE ***\"\n\n #UCS is similar to DFS and BFS, save for a few key differences\n\n # Frontier stored in a Priority Queue\n frontier = util.PriorityQueue()\n\n # Visited states stored in a list\n visitedStates = []\n\n # Format of each element: ((current coordinates, [path taken to get there]), cost)\n frontier.push((problem.getStartState(), []), 0)\n\n # while there are still states to explore\n while not frontier.isEmpty():\n\n # store the current state and path in separate variables\n currentState, pathTaken = frontier.pop()\n\n # for skipping states that have already been visited\n if currentState in visitedStates:\n continue\n\n # for returning the correct path to the goal state upon discovering it\n if problem.isGoalState(currentState):\n return pathTaken\n\n # count the current state as \"visited\"\n visitedStates.append(currentState)\n\n # for each successor state, check whether they have already been visited. \n \n for coordinates, direction, cost in problem.getSuccessors(currentState):\n\n if coordinates not in visitedStates:\n # if not, re-calculate the cost to reach the given coordinates, and push the updated information to the frontier\n newCost = problem.getCostOfActions(pathTaken + [direction])\n\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n\n util.raiseNotDefined()\n\ndef nullHeuristic(state, problem=None):\n \"\"\"\n A heuristic function estimates the cost from the current state to the nearest\n goal in the provided SearchProblem. 
This heuristic is trivial.\n \"\"\"\n return 0\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"Search the node that has the lowest combined cost and heuristic first.\"\n \"*** YOUR CODE HERE ***\"\n\n # A* is different in that the heuristic argument provided is included in some parts\n\n # Frontier stored in a Priority Queue\n frontier = util.PriorityQueue()\n\n # Visited states stored in a list\n visitedStates = []\n\n # Format of each element: ((current coordinates, [path taken to get there]), heuristic function)\n frontier.push((problem.getStartState(), []), heuristic(problem.getStartState(), problem))\n\n # while there are still states to explore\n while not frontier.isEmpty():\n\n # store the current state and path in separate variables\n currentState, pathTaken = frontier.pop()\n\n # for skipping states that have already been visited\n if currentState in visitedStates:\n continue\n\n # for returning the correct path to the goal state upon discovering it\n if problem.isGoalState(currentState):\n return pathTaken\n\n # count the current state as \"visited\"\n visitedStates.append(currentState)\n\n # for each successor state, check whether they have already been visited.\n for coordinates, direction, cost in problem.getSuccessors(currentState):\n\n if coordinates not in visitedStates:\n # if not, re-calculate the cost to reach the given coordinates, and push the updated information to the frontier. Here, unlike UCS, the heuristic function is added to the newCost variable\n newCost = problem.getCostOfActions(pathTaken + [direction]) + heuristic(coordinates, problem)\n\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n\n util.raiseNotDefined()\n\n# Abbreviations\nbfs = breadthFirstSearch\ndfs = depthFirstSearch\nastar = aStarSearch\nucs = uniformCostSearch\n",
"<docstring token>\nimport util\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\ndef tinyMazeSearch(problem):\n \"\"\"\n Returns a sequence of moves that solves tinyMaze. For any other\n maze, the sequence of moves will be incorrect, so only use this for tinyMaze\n \"\"\"\n from game import Directions\n s = Directions.SOUTH\n w = Directions.WEST\n return [s, s, w, s, w, w, s, w]\n\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Stack()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\ndef breadthFirstSearch(problem):\n \"\"\"\n Search the shallowest nodes in the search tree first.\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Queue()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\ndef uniformCostSearch(problem):\n \"\"\"Search the node of least total cost first. 
\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), 0)\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction])\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\ndef nullHeuristic(state, problem=None):\n \"\"\"\n A heuristic function estimates the cost from the current state to the nearest\n goal in the provided SearchProblem. This heuristic is trivial.\n \"\"\"\n return 0\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\nbfs = breadthFirstSearch\ndfs = depthFirstSearch\nastar = aStarSearch\nucs = uniformCostSearch\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\ndef tinyMazeSearch(problem):\n \"\"\"\n Returns a sequence of moves that solves tinyMaze. For any other\n maze, the sequence of moves will be incorrect, so only use this for tinyMaze\n \"\"\"\n from game import Directions\n s = Directions.SOUTH\n w = Directions.WEST\n return [s, s, w, s, w, w, s, w]\n\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Stack()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\ndef breadthFirstSearch(problem):\n \"\"\"\n Search the shallowest nodes in the search tree first.\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Queue()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\ndef uniformCostSearch(problem):\n \"\"\"Search the node of least total cost first. 
\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), 0)\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction])\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\ndef nullHeuristic(state, problem=None):\n \"\"\"\n A heuristic function estimates the cost from the current state to the nearest\n goal in the provided SearchProblem. This heuristic is trivial.\n \"\"\"\n return 0\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\nbfs = breadthFirstSearch\ndfs = depthFirstSearch\nastar = aStarSearch\nucs = uniformCostSearch\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\ndef tinyMazeSearch(problem):\n \"\"\"\n Returns a sequence of moves that solves tinyMaze. For any other\n maze, the sequence of moves will be incorrect, so only use this for tinyMaze\n \"\"\"\n from game import Directions\n s = Directions.SOUTH\n w = Directions.WEST\n return [s, s, w, s, w, w, s, w]\n\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Stack()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\ndef breadthFirstSearch(problem):\n \"\"\"\n Search the shallowest nodes in the search tree first.\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Queue()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\ndef uniformCostSearch(problem):\n \"\"\"Search the node of least total cost first. 
\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), 0)\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction])\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\ndef nullHeuristic(state, problem=None):\n \"\"\"\n A heuristic function estimates the cost from the current state to the nearest\n goal in the provided SearchProblem. This heuristic is trivial.\n \"\"\"\n return 0\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\ndef tinyMazeSearch(problem):\n \"\"\"\n Returns a sequence of moves that solves tinyMaze. For any other\n maze, the sequence of moves will be incorrect, so only use this for tinyMaze\n \"\"\"\n from game import Directions\n s = Directions.SOUTH\n w = Directions.WEST\n return [s, s, w, s, w, w, s, w]\n\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Stack()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\n<function token>\n\n\ndef uniformCostSearch(problem):\n \"\"\"Search the node of least total cost first. \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), 0)\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction])\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\ndef nullHeuristic(state, problem=None):\n \"\"\"\n A heuristic function estimates the cost from the current state to the nearest\n goal in the provided SearchProblem. 
This heuristic is trivial.\n \"\"\"\n return 0\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\ndef tinyMazeSearch(problem):\n \"\"\"\n Returns a sequence of moves that solves tinyMaze. For any other\n maze, the sequence of moves will be incorrect, so only use this for tinyMaze\n \"\"\"\n from game import Directions\n s = Directions.SOUTH\n w = Directions.WEST\n return [s, s, w, s, w, w, s, w]\n\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Stack()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\n<function token>\n\n\ndef uniformCostSearch(problem):\n \"\"\"Search the node of least total cost first. 
\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), 0)\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction])\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<function token>\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\n<function token>\n\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Stack()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\n<function token>\n\n\ndef uniformCostSearch(problem):\n \"\"\"Search the node of least total cost first. 
\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), 0)\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction])\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<function token>\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\n<function token>\n\n\ndef depthFirstSearch(problem):\n \"\"\"\n Search the deepest nodes in the search tree first\n\n Your search algorithm needs to return a list of actions that reaches\n the goal. 
Make sure to implement a graph search algorithm\n\n To get started, you might want to try some of these simple commands to\n understand the search problem that is being passed in:\n\n print(\"Start:\", problem.getStartState())\n print(\"Is the start a goal?\", problem.isGoalState(problem.getStartState()))\n print(\"Start's successors:\", problem.getSuccessors(problem.getStartState()))\n \"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.Stack()\n visitedStates = []\n frontier.push((problem.getStartState(), []))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n frontier.push((coordinates, pathTaken + [direction]))\n util.raiseNotDefined()\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. 
The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef aStarSearch(problem, heuristic=nullHeuristic):\n \"\"\"Search the node that has the lowest combined cost and heuristic first.\"\"\"\n \"\"\"*** YOUR CODE HERE ***\"\"\"\n frontier = util.PriorityQueue()\n visitedStates = []\n frontier.push((problem.getStartState(), []), heuristic(problem.\n getStartState(), problem))\n while not frontier.isEmpty():\n currentState, pathTaken = frontier.pop()\n if currentState in visitedStates:\n continue\n if problem.isGoalState(currentState):\n return pathTaken\n visitedStates.append(currentState)\n for coordinates, direction, cost in problem.getSuccessors(currentState\n ):\n if coordinates not in visitedStates:\n newCost = problem.getCostOfActions(pathTaken + [direction]\n ) + heuristic(coordinates, problem)\n frontier.push((coordinates, pathTaken + [direction]), newCost)\n util.raiseNotDefined()\n\n\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n \"\"\"\n This class outlines the structure of a search problem, but doesn't implement\n any of the methods (in object-oriented terminology: an abstract class).\n\n You do not need to change anything in this class, ever.\n \"\"\"\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n <docstring token>\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n\n def getCostOfActions(self, actions):\n \"\"\"\n actions: A list of actions to take\n\n This method returns the total cost of a particular sequence of actions. The sequence must\n be composed of legal moves\n \"\"\"\n util.raiseNotDefined()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n <docstring token>\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n\n def isGoalState(self, state):\n \"\"\"\n state: Search state\n\n Returns True if and only if the state is a valid goal state\n \"\"\"\n util.raiseNotDefined()\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n <docstring token>\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n <function token>\n\n def getSuccessors(self, state):\n \"\"\"\n state: Search state\n\n For a given state, this should return a list of triples,\n (successor, action, stepCost), where 'successor' is a\n successor to the current state, 'action' is the action\n required to get there, and 'stepCost' is the incremental\n cost of expanding to that successor\n \"\"\"\n util.raiseNotDefined()\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n <docstring token>\n\n def getStartState(self):\n \"\"\"\n Returns the start state for the search problem\n \"\"\"\n util.raiseNotDefined()\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n",
"<docstring token>\n<import token>\n\n\nclass SearchProblem:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n",
"<docstring token>\n<import token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n"
] | false |
9,773 |
13e27c29839286988b37d2d3685f54d42fd57973
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Christiaan Frans Rademan <[email protected]>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
from luxon import register
from luxon import router
from luxon.helpers.api import sql_list, obj
from infinitystone.models.roles import infinitystone_role
@register.resources()
class Roles(object):
    """REST resource exposing CRUD endpoints for roles.

    All routes are registered when the resource is instantiated.  Each
    handler delegates to the luxon API helpers (``obj`` / ``sql_list``),
    which bind the request to the ``infinitystone_role`` model.
    """

    def __init__(self):
        # (HTTP method(s), route, handler, policy tag) for every endpoint.
        endpoints = (
            ('GET', '/v1/role/{id}', self.role, 'roles:view'),
            ('GET', '/v1/roles', self.roles, 'roles:view'),
            ('POST', '/v1/role', self.create, 'roles:admin'),
            (['PUT', 'PATCH'], '/v1/role/{id}', self.update, 'roles:admin'),
            ('DELETE', '/v1/role/{id}', self.delete, 'roles:admin'),
        )
        for methods, route, handler, policy_tag in endpoints:
            router.add(methods, route, handler, tag=policy_tag)

    def role(self, req, resp, id):
        """Return a single role selected by its primary key."""
        return obj(req, infinitystone_role, sql_id=id)

    def roles(self, req, resp):
        """Return a listing of roles, searchable by ``id`` and ``name``."""
        searchable = {'id': str, 'name': str}
        return sql_list(req, 'infinitystone_role', search=searchable)

    def create(self, req, resp):
        """Create a role from the request body and persist it."""
        model = obj(req, infinitystone_role)
        model.commit()
        return model

    def update(self, req, resp, id):
        """Apply the request body to an existing role and persist it."""
        model = obj(req, infinitystone_role, sql_id=id)
        model.commit()
        return model

    def delete(self, req, resp, id):
        """Handle DELETE for the role identified by *id*.

        NOTE(review): this handler only fetches and commits the model;
        deletion is presumably applied inside ``obj()`` based on the
        DELETE request method — confirm against the luxon helper.
        """
        model = obj(req, infinitystone_role, sql_id=id)
        model.commit()
        return model
|
[
"# -*- coding: utf-8 -*-\n# Copyright (c) 2018-2020 Christiaan Frans Rademan <[email protected]>.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holders nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF\n# THE POSSIBILITY OF SUCH DAMAGE.\nfrom luxon import register\nfrom luxon import router\nfrom luxon.helpers.api import sql_list, obj\n\nfrom infinitystone.models.roles import infinitystone_role\n\n\[email protected]()\nclass Roles(object):\n def __init__(self):\n router.add('GET', '/v1/role/{id}', self.role,\n tag='roles:view')\n router.add('GET', '/v1/roles', self.roles,\n tag='roles:view')\n router.add('POST', '/v1/role', self.create,\n tag='roles:admin')\n router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update,\n tag='roles:admin')\n router.add('DELETE', '/v1/role/{id}', self.delete,\n tag='roles:admin')\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role',\n search={'id': str,\n 'name': str})\n\n def create(self, req, resp):\n role = obj(req, infinitystone_role)\n role.commit()\n return role\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"from luxon import register\nfrom luxon import router\nfrom luxon.helpers.api import sql_list, obj\nfrom infinitystone.models.roles import infinitystone_role\n\n\[email protected]()\nclass Roles(object):\n\n def __init__(self):\n router.add('GET', '/v1/role/{id}', self.role, tag='roles:view')\n router.add('GET', '/v1/roles', self.roles, tag='roles:view')\n router.add('POST', '/v1/role', self.create, tag='roles:admin')\n router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update, tag=\n 'roles:admin')\n router.add('DELETE', '/v1/role/{id}', self.delete, tag='roles:admin')\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n\n def create(self, req, resp):\n role = obj(req, infinitystone_role)\n role.commit()\n return role\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"<import token>\n\n\[email protected]()\nclass Roles(object):\n\n def __init__(self):\n router.add('GET', '/v1/role/{id}', self.role, tag='roles:view')\n router.add('GET', '/v1/roles', self.roles, tag='roles:view')\n router.add('POST', '/v1/role', self.create, tag='roles:admin')\n router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update, tag=\n 'roles:admin')\n router.add('DELETE', '/v1/role/{id}', self.delete, tag='roles:admin')\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n\n def create(self, req, resp):\n role = obj(req, infinitystone_role)\n role.commit()\n return role\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"<import token>\n\n\[email protected]()\nclass Roles(object):\n\n def __init__(self):\n router.add('GET', '/v1/role/{id}', self.role, tag='roles:view')\n router.add('GET', '/v1/roles', self.roles, tag='roles:view')\n router.add('POST', '/v1/role', self.create, tag='roles:admin')\n router.add(['PUT', 'PATCH'], '/v1/role/{id}', self.update, tag=\n 'roles:admin')\n router.add('DELETE', '/v1/role/{id}', self.delete, tag='roles:admin')\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n <function token>\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"<import token>\n\n\[email protected]()\nclass Roles(object):\n <function token>\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n <function token>\n\n def update(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"<import token>\n\n\[email protected]()\nclass Roles(object):\n <function token>\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n <function token>\n <function token>\n\n def delete(self, req, resp, id):\n role = obj(req, infinitystone_role, sql_id=id)\n role.commit()\n return role\n",
"<import token>\n\n\[email protected]()\nclass Roles(object):\n <function token>\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n\n def roles(self, req, resp):\n return sql_list(req, 'infinitystone_role', search={'id': str,\n 'name': str})\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\[email protected]()\nclass Roles(object):\n <function token>\n\n def role(self, req, resp, id):\n return obj(req, infinitystone_role, sql_id=id)\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\[email protected]()\nclass Roles(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
9,774 |
f72cdf8d91c31760335b96052a34615307f48727
|
from cpp_service.SubService import SubService
import config
if __name__ == "__main__":
    # Connection settings for the trading-system gateway come from config.
    gateway = config.gateway["trading_system_gateway"]
    service = SubService(
        gateway["host"],
        gateway["port"],
        gateway["server_id"],
        gateway["licences"],
    )
    """订阅order"""
    service.sub_order()
|
[
"from cpp_service.SubService import SubService\nimport config\n\nif __name__ == \"__main__\":\n gateway = config.gateway[\"trading_system_gateway\"]\n host = gateway[\"host\"]\n port = gateway[\"port\"]\n server_id = gateway[\"server_id\"]\n licences = gateway[\"licences\"]\n\n service = SubService(host, port, server_id, licences)\n \"\"\"订阅order\"\"\"\n service.sub_order()\n",
"from cpp_service.SubService import SubService\nimport config\nif __name__ == '__main__':\n gateway = config.gateway['trading_system_gateway']\n host = gateway['host']\n port = gateway['port']\n server_id = gateway['server_id']\n licences = gateway['licences']\n service = SubService(host, port, server_id, licences)\n \"\"\"订阅order\"\"\"\n service.sub_order()\n",
"<import token>\nif __name__ == '__main__':\n gateway = config.gateway['trading_system_gateway']\n host = gateway['host']\n port = gateway['port']\n server_id = gateway['server_id']\n licences = gateway['licences']\n service = SubService(host, port, server_id, licences)\n \"\"\"订阅order\"\"\"\n service.sub_order()\n",
"<import token>\n<code token>\n"
] | false |
9,775 |
00b4a57537358797bfe37eee76bbf73ef42de081
|
# NOTE(review): Python 2 syntax (print statements) — this file will not run
# under Python 3 without converting every print to a function call.
#Define a function max_of_three() that takes three numbers as
#arguments and returns the largest of them.
def max_of_three(a,b,c):
    # Largest of three via two pairwise comparisons.
    # NOTE(review): the local name `max` shadows the builtin max() within
    # this function body; harmless here, but worth renaming for clarity.
    max=0
    if a > b:
        max = a
    else:
        max = b

    if max > c :
        return max
    else:
        return c


# NOTE(review): this calls the *builtin* max(), not the max_of_three()
# just defined — presumably a typo; both return 234 for this input, so
# the mistake is invisible at runtime. Confirm intent.
print max(234,124,43)


def max_of_three2(a, b, c):
    # Alternative version: prints the largest of a, b, c.
    # NOTE(review): prints instead of returning, despite the exercise
    # statement above asking for a return value; the caller below
    # therefore prints the winner followed by "None" (the implicit
    # return value).
    if a > b and a > c:
        print a
    elif b > c:
        print b
    else:
        print c


print max_of_three2(0, 15, 2)
|
[
"\n\n\n#Define a function max_of_three() that takes three numbers as\n#arguments and returns the largest of them.\n\n\n\n\ndef max_of_three(a,b,c):\n\n max=0\n if a > b:\n max = a\n else:\n max = b\n\n if max > c :\n return max\n else:\n return c\n\n\n\nprint max(234,124,43)\n\n\ndef max_of_three2(a, b, c):\n if a > b and a > c:\n print a\n elif b > c:\n print b\n else:\n print c\n\n\nprint max_of_three2(0, 15, 2)"
] | true |
9,776 |
286953e381d03c0817d57f9ee4e15f2a0ce808a9
|
from django_evolution.mutations import ChangeField
MUTATIONS = [
ChangeField('ReviewRequest', 'depends_on', initial=None, null=False),
ChangeField('ReviewRequestDraft', 'depends_on', initial=None, null=False),
]
|
[
"from django_evolution.mutations import ChangeField\n\n\nMUTATIONS = [\n ChangeField('ReviewRequest', 'depends_on', initial=None, null=False),\n ChangeField('ReviewRequestDraft', 'depends_on', initial=None, null=False),\n]\n",
"from django_evolution.mutations import ChangeField\nMUTATIONS = [ChangeField('ReviewRequest', 'depends_on', initial=None, null=\n False), ChangeField('ReviewRequestDraft', 'depends_on', initial=None,\n null=False)]\n",
"<import token>\nMUTATIONS = [ChangeField('ReviewRequest', 'depends_on', initial=None, null=\n False), ChangeField('ReviewRequestDraft', 'depends_on', initial=None,\n null=False)]\n",
"<import token>\n<assignment token>\n"
] | false |
9,777 |
8279f8a80d96a7231e35100d2c39fa5e1f34f5f5
|
from scipy.cluster.hierarchy import dendrogram, linkage
from get_train import get, pre
import matplotlib.pyplot as plt
#%%
# Ticker symbols: four banks followed by four tech companies.
index = ['BAC', 'JPM', 'GS', 'C', 'AAPL', 'IBM', 'MSFT', 'ORCL']

# One column of dendrograms per year.
years = [2010, 2013, 2016]

# Fundamentals used as clustering features for each company.
features = [
    'TOTAL ASSETS',
    'Cash & Equivalents',
    'Receivables - Total (Net)',
    'Inventories - Total',
    'Sales (Net)',
    'Cost of Good Sold',
    'GROSS PROFIT',
]

# One row of dendrograms per linkage method.
methods = ['single', 'complete', 'average', 'ward']

# 4x3 grid: rows = linkage methods, columns = years.
fig, axes = plt.subplots(4, 3, figsize=(16, 9))
fig.tight_layout()
fig.subplots_adjust(wspace=0.05)

for col, year in enumerate(years):
    # Fetch and preprocess the feature matrix for this year.
    train = pre(get(year, features, index))
    for row, method in enumerate(methods):
        ax = axes[row, col]
        dn = dendrogram(linkage(train, method=method), ax=ax, labels=index)
        ax.set_yticks([])

# Label the first three rows with their linkage method names; highlight
# the ward row in red.
for row in range(3):
    axes[row, 0].set_ylabel(methods[row], rotation=0, labelpad=25)
axes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')

for col in range(3):
    axes[0, col].set_title(years[col])
|
[
"from scipy.cluster.hierarchy import dendrogram, linkage\r\nfrom get_train import get, pre\r\nimport matplotlib.pyplot as plt\r\n#%%\r\nindex = [\r\n 'BAC', \r\n 'JPM', \r\n 'GS', \r\n 'C',\r\n 'AAPL', \r\n 'IBM', \r\n 'MSFT', \r\n 'ORCL'\r\n ]\r\n\r\nyears = [\r\n 2010,\r\n 2013,\r\n 2016\r\n ]\r\n\r\nfeatures = [\r\n 'TOTAL ASSETS', \r\n 'Cash & Equivalents',\r\n 'Receivables - Total (Net)',\r\n 'Inventories - Total',\r\n 'Sales (Net)',\r\n 'Cost of Good Sold',\r\n 'GROSS PROFIT'\r\n ]\r\n\r\nmethods = [\r\n 'single', \r\n 'complete', \r\n 'average', \r\n 'ward'\r\n ]\r\n\r\n#%%\r\nfig, axes = plt.subplots(4, 3, figsize=(16, 9))\r\nfig.tight_layout()\r\nfig.subplots_adjust(wspace=0.05)\r\n\r\ni = 0\r\nj = 0\r\nfor year in years:\r\n train = get(year, features, index)\r\n train = pre(train)\r\n \r\n for method in methods:\r\n ax = axes[i, j]\r\n Z = linkage(train, method=method)\r\n dn = dendrogram(Z, ax=ax, labels=index)\r\n ax.set_yticks([])\r\n \r\n i += 1\r\n \r\n j += 1\r\n i = 0\r\n\r\nfor i in range(3):\r\n axes[i, 0].set_ylabel(\r\n methods[i], \r\n rotation=0, \r\n labelpad=25\r\n )\r\naxes[3, 0].set_ylabel(\r\n 'WARD', \r\n rotation=0, \r\n labelpad=25,\r\n color='r'\r\n )\r\n \r\nfor j in range(3):\r\n axes[0, j].set_title(years[j])\r\n ",
"from scipy.cluster.hierarchy import dendrogram, linkage\nfrom get_train import get, pre\nimport matplotlib.pyplot as plt\nindex = ['BAC', 'JPM', 'GS', 'C', 'AAPL', 'IBM', 'MSFT', 'ORCL']\nyears = [2010, 2013, 2016]\nfeatures = ['TOTAL ASSETS', 'Cash & Equivalents',\n 'Receivables - Total (Net)', 'Inventories - Total', 'Sales (Net)',\n 'Cost of Good Sold', 'GROSS PROFIT']\nmethods = ['single', 'complete', 'average', 'ward']\nfig, axes = plt.subplots(4, 3, figsize=(16, 9))\nfig.tight_layout()\nfig.subplots_adjust(wspace=0.05)\ni = 0\nj = 0\nfor year in years:\n train = get(year, features, index)\n train = pre(train)\n for method in methods:\n ax = axes[i, j]\n Z = linkage(train, method=method)\n dn = dendrogram(Z, ax=ax, labels=index)\n ax.set_yticks([])\n i += 1\n j += 1\n i = 0\nfor i in range(3):\n axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)\naxes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')\nfor j in range(3):\n axes[0, j].set_title(years[j])\n",
"<import token>\nindex = ['BAC', 'JPM', 'GS', 'C', 'AAPL', 'IBM', 'MSFT', 'ORCL']\nyears = [2010, 2013, 2016]\nfeatures = ['TOTAL ASSETS', 'Cash & Equivalents',\n 'Receivables - Total (Net)', 'Inventories - Total', 'Sales (Net)',\n 'Cost of Good Sold', 'GROSS PROFIT']\nmethods = ['single', 'complete', 'average', 'ward']\nfig, axes = plt.subplots(4, 3, figsize=(16, 9))\nfig.tight_layout()\nfig.subplots_adjust(wspace=0.05)\ni = 0\nj = 0\nfor year in years:\n train = get(year, features, index)\n train = pre(train)\n for method in methods:\n ax = axes[i, j]\n Z = linkage(train, method=method)\n dn = dendrogram(Z, ax=ax, labels=index)\n ax.set_yticks([])\n i += 1\n j += 1\n i = 0\nfor i in range(3):\n axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)\naxes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')\nfor j in range(3):\n axes[0, j].set_title(years[j])\n",
"<import token>\n<assignment token>\nfig.tight_layout()\nfig.subplots_adjust(wspace=0.05)\n<assignment token>\nfor year in years:\n train = get(year, features, index)\n train = pre(train)\n for method in methods:\n ax = axes[i, j]\n Z = linkage(train, method=method)\n dn = dendrogram(Z, ax=ax, labels=index)\n ax.set_yticks([])\n i += 1\n j += 1\n i = 0\nfor i in range(3):\n axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)\naxes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')\nfor j in range(3):\n axes[0, j].set_title(years[j])\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,778 |
6339a1a06319a748030b3411c7a8d00f36336e65
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class OpenStackDeprecationWarning(DeprecationWarning):
    """Root of openstacksdk's deprecation-warning hierarchy."""


class RemovedResourceWarning(OpenStackDeprecationWarning):
    """Warns that newer API versions removed this resource, so it
    must no longer be used.
    """


class RemovedFieldWarning(OpenStackDeprecationWarning):
    """Warns that newer API versions removed this field, so it must
    no longer be used.
    """


class LegacyAPIWarning(OpenStackDeprecationWarning):
    """Warns about an API in 'legacy' status — a long-term deprecation."""


class OpenStackWarning(Warning):
    """Root of openstacksdk's general (non-deprecation) warnings."""


class ConfigurationWarning(OpenStackWarning):
    """Warns about a problem with configuration."""


class UnsupportedServiceVersion(OpenStackWarning):
    """Warns about a major service version the SDK doesn't understand."""
|
[
"# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\nclass OpenStackDeprecationWarning(DeprecationWarning):\n \"\"\"Base class for warnings about deprecated features in openstacksdk.\"\"\"\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"class OpenStackDeprecationWarning(DeprecationWarning):\n \"\"\"Base class for warnings about deprecated features in openstacksdk.\"\"\"\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"class OpenStackDeprecationWarning(DeprecationWarning):\n <docstring token>\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n <docstring token>\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n <docstring token>\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n <docstring token>\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass OpenStackWarning(Warning):\n <docstring token>\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass ConfigurationWarning(OpenStackWarning):\n <docstring token>\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n <docstring token>\n",
"<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
9,779 |
2f9a081845685a4748c8b028ae4ee3a056a10284
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of CbM (https://github.com/ec-jrc/cbm).
# Author : Konstantinos Anastasakis
# Credits : GTCAP Team
# Copyright : 2021 European Commission, Joint Research Centre
# License : 3-Clause BSD
import os
import glob
from ipywidgets import (Text, Label, HBox, VBox, Layout, Dropdown,
ToggleButtons, Output, HTML, Button,
FileUpload, IntText, RadioButtons)
from cbm.utils import config
from cbm.ipycbm.utils import settings_ds, cbm_widgets
from cbm.ipycbm.ipy_ext import ext_func
from cbm.foi import foi_v1
from cbm.datas import db
# Optional dependency: FOI v2 may be unavailable in some installations.
# Best-effort import — on failure the error is printed for visibility and
# the module continues with v1-only functionality.
try:
    from cbm.foi import foi_v2
except Exception as err:
    print(err)
def foi_tab_v1():
path_foi = f"{config.get_value(['paths', 'temp'])}/foi/"
path_foi_func = foi_v1.path_foi_func
progress = Output()
def outlog(*text):
with progress:
print(*text)
foi_info = HTML("""FOI procedures version 1 (requires access to a database).
""", placeholder='FOI Information')
# Connect to database
config_info = HTML(value="""1. Connect to database and object storage.<br>
FOI procedures need direct access to the database. In case there no
image is provided, access to object storage will be needed as well
to generate the base image from sentinel images.
""", placeholder='FOI Information')
config_conn = Button(
value=False,
button_style='info',
tooltip='Configure db connection.',
icon='cogs',
layout=Layout(width='40px')
)
config_conn_box = HBox([])
@config_conn.on_click
def config_conn_on_click(b):
if config_conn_box.children == ():
config_conn_box.children = [settings_ds.direct_conn()]
else:
config_conn_box.children = ()
config_box = VBox([config_info, config_conn,
config_conn_box])
# Spatial data to be tested
spatial_info = HTML(
"""2. Select the spatial data to be tested - parcels that will be
checked for heterogeneity and cardinality.<br>
- Select a table from the database""")
db_tables = Dropdown(
options=[],
description='db Tables:'
)
refresh_db_tables = Button(
value=False,
button_style='info',
tooltip='Get db tables.',
icon='refresh',
layout=Layout(width='40px')
)
@refresh_db_tables.on_click
def refresh_db_tables_on_click(b):
db_tables.options = db.tables(config.get_value(['set', 'db_conn']))
db_tables_box = HBox([db_tables, refresh_db_tables])
upload_shp = Button(
description='Create new table',
value=False,
button_style='info',
tooltip='upload_shp.',
icon='up'
)
upload_box = VBox([])
@upload_shp.on_click
def upload_shp_on_click(b):
if upload_box.children == ():
upload_box.children = [ext_func.upload_shp(path_foi, True)]
else:
upload_box.children = ()
spatial_box = VBox([spatial_info, upload_shp, upload_box, db_tables_box])
# Thematic raster.
img_info = HTML(
"""3. Thematic raster - classification raster, or raster from other
source that will be used for testing heterogeneity and cardinality.<br>
- Upload or generate raster base image.
(Only upload is currently available)""")
img_option = ToggleButtons(
options=['Upload', 'Generate'],
value=None,
disabled=True,
button_style='info', # 'success', 'info', 'warning', 'danger' or ''
tooltips=['Upnload your base image', 'Get from object storage']
)
def on_img_option_change(change):
if img_option.value == 'Upload':
img_box.children = [HBox([img_info, img_option, img_file])]
else:
img_box.children = ()
img_option.observe(on_img_option_change, 'value')
img_file = cbm_widgets.get_files_dropdown(
f'{path_foi}raster', '.tif, .tiff', 'Select Raster')
img_box = VBox([img_info, img_option, img_file])
# YAML File upload
yml_info = HTML(
"""4. YAML file that holds the classes form the thematic raster.<br>
- This can be also a simple list of values in the notebook
corespondence between pixel values and names for the classes""")
yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',
'Select YML')
yml_box = VBox([yml_info, yml_file])
# Database functions
dbf_info = HTML("""5. Create database functions.<br>
- Import required database functions for FOI analysis to the database""")
dbf_insert = Button(
value=False,
button_style='info',
tooltip='Create functions.',
icon='fa-share-square'
)
@dbf_insert.on_click
def dbf_insert_on_click(b):
outlog('path_foi_func :', path_foi_func)
progress.clear_output()
try:
functions = glob.glob(f"{path_foi_func}*.func")
db = config.get_value(['set', 'db_conn'])
sche = config.get_value(['db', db, 'sche'])
user = config.get_value(['db', db, 'user'])
for f in functions:
db.insert_function(open(f).read().format(
schema=sche, owner=user))
outlog(f"The '{f}' Was imported to the database.")
finc_list = [
f"ipycbm_{f.split('/')[-1].split('.')[0]}, " for f in functions]
outlog(
f"The functions: {('').join(finc_list)} where added to the database")
except Exception as err:
outlog("Could not add functions to dattabase.", err)
dbf_box = VBox(
[dbf_info, dbf_insert])
# FOI Parameters
param_info = HTML(
"""6. Set FOI v1 Parameters""")
# heterogeneity_threshold
param_heto_info = HTML("""
Minimum and maximum thresholds for heterogeneity checks. In the example,
any parcel with percentage of pixels for one class between 30 and 70 from
the total, will be considered heterogenous.
""")
param_min_het = IntText(
value=30,
description='MIN:',
tooltip="Minimum threshold for heterogeneity checks",
layout=Layout(width='150px')
)
param_max_het = IntText(
value=70,
description='MAX:',
tooltip="Maximum threshold for heterogeneity checks",
layout=Layout(width='150px')
)
param_area_info = HTML("""Minimum area for clusters selection -
only clusters bigger from this threshold will be counted.
""")
param_area = IntText(
value=2000,
description='area:',
tooltip="Minimum area for clusters selection.",
layout=Layout(width='200px')
)
param_box = VBox([param_info,
param_heto_info, HBox([param_min_het, param_max_het]),
param_area_info, param_area
])
# Run FOI analysis
run_info = Label("7. Run the FOI analysis.")
run_analysis = Button(
description='Run FOI v1',
value=False,
button_style='info',
tooltip='Run FOI analysis version 1',
icon='play',
)
run_box = VBox([run_info, run_analysis])
    @run_analysis.on_click
    def run_analysis_on_click(b):
        # Run FOI v1 inside the 'progress' Output widget so any printed
        # output from the analysis appears in the notebook.
        with progress:
            # NOTE(review): children[1].children[0].value assumes the layout
            # produced by cbm_widgets.get_files_dropdown (second child holds
            # the file selector) — confirm if that helper changes.
            foi_v1.main(
                db_tables.value,
                f"{path_foi}raster/{img_file.children[1].children[0].value}",
                f"{path_foi}{yml_file.children[1].children[0].value}",
                param_min_het.value, param_max_het.value, param_area.value)
wbox = VBox([foi_info,
config_box,
spatial_box,
img_box,
yml_box,
dbf_box,
param_box,
run_box,
progress])
return wbox
def foi_tab_v2():
    """Build and return the FOI v2 notebook tab (VBox widget).

    FOI version 2 runs entirely on local files (vector, thematic raster and
    a YAML class map) — unlike v1 it does not need a database connection.
    The returned VBox wires together the input selectors, the parameter
    widgets and a 'Run FOI v2' button that calls foi_v2.main().
    """
    # Working directory for all FOI inputs under the configured temp path.
    path_foi = f"{config.get_value(['paths', 'temp'])}/foi/"
    progress = Output()
    def outlog(*text):
        # Print into the progress Output widget so messages show in the tab.
        with progress:
            print(*text)
    foi_info = HTML("""FOI procedures version 2 (does not require access to a database).
    """, placeholder='FOI Information')
    # Vector file
    shp_info = HTML(
        """1. Spatial data to be tested -
        parcels that will be checked for heterogeneity and cardinality.""")
    shp_file = cbm_widgets.get_files_dropdown(
        f'{path_foi}vector', '', 'Select .shp', True, True)
    shp_box = VBox([shp_info, shp_file])
    # Thematic raster.
    img_info = HTML(
        """2. Thematic raster - classification raster, or raster from other
        source that will be used for testing heterogeneity and cardinality.<br>
        - Upload or generate raster base image.
        (Only upload is currently available)""")
    # 'Generate' is not implemented yet, hence the toggle starts disabled.
    img_option = ToggleButtons(
        options=['Upload', 'Generate'],
        value=None,
        disabled=True,
        button_style='',  # 'success', 'info', 'warning', 'danger' or ''
        tooltips=['Upnload your base image', 'Get from object storage']
    )
    def on_img_option_change(change):
        # Show the raster-upload row only while 'Upload' is selected.
        if img_option.value == 'Upload':
            img_box.children = [HBox([img_info, img_option, img_file])]
        else:
            img_box.children = ()
    img_option.observe(on_img_option_change, 'value')
    img_file = cbm_widgets.get_files_dropdown(
        f'{path_foi}raster', '.tif, .tiff', 'Select Raster')
    img_box = VBox([img_info, img_option, img_file])
    # YAML File upload
    yml_info = HTML(
        """3. YAML file that holds the classes form the thematic raster.<br>
        - This can be also a simple list of values in the notebook
        corespondence between pixel values and names for the classes""")
    yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',
                                              'Select YML')
    yml_box = VBox([yml_info, yml_file])
    # FOI Prerequisites
    pre_info = Label("4. Set FOI v2 Parameters.")
    # heterogeneity_threshold
    pre_heto_chec = HTML("""
    Minimum and maximum thresholds for heterogeneity checks. In the example,
    any parcel with percentage of pixels for one class between 30 and 70 from
    the total, will be considered heterogenous.
    """)
    pre_min_het = IntText(
        value=30,
        description='MIN:',
        tooltip="Minimum threshold for heterogeneity checks",
        disabled=False,
        layout=Layout(width='150px')
    )
    pre_max_het = IntText(
        value=70,
        description='MAX:',
        tooltip="Maximum threshold for heterogeneity checks",
        disabled=False,
        layout=Layout(width='150px')
    )
    pre_heto_chec_box = HBox([pre_min_het, pre_max_het])
    pre_min_cluster_size = IntText(
        value=20,
        description='pixels:',
        tooltip="Minimum area for clusters selection.",
        disabled=False,
        layout=Layout(width='200px')
    )
    pre_pixel_connectivity = IntText(
        value=8,
        description='connectivity type:',
        tooltip="Type of pixel connectivity in analysis. Accepted values: 4 or 8.",
        disabled=False,
        layout=Layout(width='200px')
    )
    # Negative (inward) buffer applied to each parcel before analysis.
    pre_negative_buffer = IntText(
        value=-10,
        description='negative buffer:',
        tooltip="Negative buffer to be applied on the FOI",
        disabled=False,
        layout=Layout(width='200px')
    )
    pre_box = VBox([
        pre_info, pre_heto_chec, pre_heto_chec_box,
        pre_pixel_connectivity, pre_negative_buffer,
        HBox([pre_min_cluster_size,
              HTML("Minimum area for clusters selection - only clusters bigger from this threshold will be counted.")])
    ])
    # Run FOI analysis
    run_info = Label("5. Run the FOI analysis.")
    run_analysis = Button(
        description='Run FOI v2',
        value=False,
        disabled=False,
        button_style='info',
        tooltip='Run FOI analysis version 2',
        icon='play',
    )
    run_box = HBox([run_analysis])
    @run_analysis.on_click
    def run_analysis_on_click(b):
        # Run inside the Output widget so foi_v2's prints appear in the tab.
        # NOTE(review): children[1].children[0].value assumes the widget
        # layout produced by cbm_widgets.get_files_dropdown — confirm if
        # that helper changes.
        with progress:
            foi_v2.main(
                f"{path_foi}vector/{shp_file.children[1].children[0].value}",
                f"{path_foi}raster/{img_file.children[1].children[0].value}",
                f"{path_foi}{yml_file.children[1].children[0].value}",
                pre_negative_buffer.value,
                pre_min_het.value,
                pre_max_het.value,
                pre_pixel_connectivity.value,
                pre_min_cluster_size.value)
    # Assemble the full tab, with the progress Output last.
    wbox_v2 = VBox([foi_info,
                    shp_box,
                    img_box,
                    yml_box,
                    pre_box,
                    run_info,
                    run_box,
                    progress])
    return wbox_v2
|
[
"#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n# This file is part of CbM (https://github.com/ec-jrc/cbm).\n# Author : Konstantinos Anastasakis\n# Credits : GTCAP Team\n# Copyright : 2021 European Commission, Joint Research Centre\n# License : 3-Clause BSD\n\n\nimport os\nimport glob\nfrom ipywidgets import (Text, Label, HBox, VBox, Layout, Dropdown,\n ToggleButtons, Output, HTML, Button,\n FileUpload, IntText, RadioButtons)\n\nfrom cbm.utils import config\nfrom cbm.ipycbm.utils import settings_ds, cbm_widgets\nfrom cbm.ipycbm.ipy_ext import ext_func\nfrom cbm.foi import foi_v1\nfrom cbm.datas import db\ntry:\n from cbm.foi import foi_v2\nexcept Exception as err:\n print(err)\n\n\ndef foi_tab_v1():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n path_foi_func = foi_v1.path_foi_func\n\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n\n foi_info = HTML(\"\"\"FOI procedures version 1 (requires access to a database).\n \"\"\", placeholder='FOI Information')\n\n # Connect to database\n\n config_info = HTML(value=\"\"\"1. Connect to database and object storage.<br>\n FOI procedures need direct access to the database. In case there no\n image is provided, access to object storage will be needed as well\n to generate the base image from sentinel images.\n \"\"\", placeholder='FOI Information')\n config_conn = Button(\n value=False,\n button_style='info',\n tooltip='Configure db connection.',\n icon='cogs',\n layout=Layout(width='40px')\n )\n\n config_conn_box = HBox([])\n\n @config_conn.on_click\n def config_conn_on_click(b):\n if config_conn_box.children == ():\n config_conn_box.children = [settings_ds.direct_conn()]\n else:\n config_conn_box.children = ()\n\n config_box = VBox([config_info, config_conn,\n config_conn_box])\n\n # Spatial data to be tested\n spatial_info = HTML(\n \"\"\"2. 
Select the spatial data to be tested - parcels that will be\n checked for heterogeneity and cardinality.<br>\n - Select a table from the database\"\"\")\n\n db_tables = Dropdown(\n options=[],\n description='db Tables:'\n )\n refresh_db_tables = Button(\n value=False,\n button_style='info',\n tooltip='Get db tables.',\n icon='refresh',\n layout=Layout(width='40px')\n )\n\n @refresh_db_tables.on_click\n def refresh_db_tables_on_click(b):\n db_tables.options = db.tables(config.get_value(['set', 'db_conn']))\n\n db_tables_box = HBox([db_tables, refresh_db_tables])\n\n upload_shp = Button(\n description='Create new table',\n value=False,\n button_style='info',\n tooltip='upload_shp.',\n icon='up'\n )\n\n upload_box = VBox([])\n\n @upload_shp.on_click\n def upload_shp_on_click(b):\n if upload_box.children == ():\n upload_box.children = [ext_func.upload_shp(path_foi, True)]\n else:\n upload_box.children = ()\n spatial_box = VBox([spatial_info, upload_shp, upload_box, db_tables_box])\n\n # Thematic raster.\n img_info = HTML(\n \"\"\"3. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\")\n img_option = ToggleButtons(\n options=['Upload', 'Generate'],\n value=None,\n disabled=True,\n button_style='info', # 'success', 'info', 'warning', 'danger' or ''\n tooltips=['Upnload your base image', 'Get from object storage']\n )\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n\n img_file = cbm_widgets.get_files_dropdown(\n f'{path_foi}raster', '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n\n # YAML File upload\n yml_info = HTML(\n \"\"\"4. 
YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\")\n\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n\n # Database functions\n dbf_info = HTML(\"\"\"5. Create database functions.<br>\n - Import required database functions for FOI analysis to the database\"\"\")\n\n dbf_insert = Button(\n value=False,\n button_style='info',\n tooltip='Create functions.',\n icon='fa-share-square'\n )\n\n @dbf_insert.on_click\n def dbf_insert_on_click(b):\n outlog('path_foi_func :', path_foi_func)\n progress.clear_output()\n try:\n functions = glob.glob(f\"{path_foi_func}*.func\")\n db = config.get_value(['set', 'db_conn'])\n sche = config.get_value(['db', db, 'sche'])\n user = config.get_value(['db', db, 'user'])\n\n for f in functions:\n db.insert_function(open(f).read().format(\n schema=sche, owner=user))\n outlog(f\"The '{f}' Was imported to the database.\")\n finc_list = [\n f\"ipycbm_{f.split('/')[-1].split('.')[0]}, \" for f in functions]\n outlog(\n f\"The functions: {('').join(finc_list)} where added to the database\")\n except Exception as err:\n outlog(\"Could not add functions to dattabase.\", err)\n\n dbf_box = VBox(\n [dbf_info, dbf_insert])\n\n # FOI Parameters\n param_info = HTML(\n \"\"\"6. Set FOI v1 Parameters\"\"\")\n\n # heterogeneity_threshold\n param_heto_info = HTML(\"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\")\n param_min_het = IntText(\n value=30,\n description='MIN:',\n tooltip=\"Minimum threshold for heterogeneity checks\",\n layout=Layout(width='150px')\n )\n param_max_het = IntText(\n value=70,\n description='MAX:',\n tooltip=\"Maximum threshold for heterogeneity checks\",\n layout=Layout(width='150px')\n )\n\n param_area_info = HTML(\"\"\"Minimum area for clusters selection -\n only clusters bigger from this threshold will be counted.\n \"\"\")\n param_area = IntText(\n value=2000,\n description='area:',\n tooltip=\"Minimum area for clusters selection.\",\n layout=Layout(width='200px')\n )\n\n param_box = VBox([param_info,\n param_heto_info, HBox([param_min_het, param_max_het]),\n param_area_info, param_area\n ])\n\n # Run FOI analysis\n run_info = Label(\"7. Run the FOI analysis.\")\n run_analysis = Button(\n description='Run FOI v1',\n value=False,\n button_style='info',\n tooltip='Run FOI analysis version 1',\n icon='play',\n )\n run_box = VBox([run_info, run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v1.main(\n db_tables.value,\n f\"{path_foi}raster/{img_file.children[1].children[0].value}\",\n f\"{path_foi}{yml_file.children[1].children[0].value}\",\n param_min_het.value, param_max_het.value, param_area.value)\n\n wbox = VBox([foi_info,\n config_box,\n spatial_box,\n img_box,\n yml_box,\n dbf_box,\n param_box,\n run_box,\n progress])\n\n return wbox\n\n\ndef foi_tab_v2():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n\n foi_info = HTML(\"\"\"FOI procedures version 2 (does not require access to a database).\n \"\"\", placeholder='FOI Information')\n\n # Vector file\n shp_info = HTML(\n \"\"\"1. 
Spatial data to be tested -\n parcels that will be checked for heterogeneity and cardinality.\"\"\")\n shp_file = cbm_widgets.get_files_dropdown(\n f'{path_foi}vector', '', 'Select .shp', True, True)\n shp_box = VBox([shp_info, shp_file])\n\n # Thematic raster.\n img_info = HTML(\n \"\"\"2. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\")\n img_option = ToggleButtons(\n options=['Upload', 'Generate'],\n value=None,\n disabled=True,\n button_style='', # 'success', 'info', 'warning', 'danger' or ''\n tooltips=['Upnload your base image', 'Get from object storage']\n )\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(\n f'{path_foi}raster', '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n\n # YAML File upload\n yml_info = HTML(\n \"\"\"3. YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\")\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n\n # FOI Prerequisites\n pre_info = Label(\"4. Set FOI v2 Parameters.\")\n\n # heterogeneity_threshold\n pre_heto_chec = HTML(\"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\")\n pre_min_het = IntText(\n value=30,\n description='MIN:',\n tooltip=\"Minimum threshold for heterogeneity checks\",\n disabled=False,\n layout=Layout(width='150px')\n )\n pre_max_het = IntText(\n value=70,\n description='MAX:',\n tooltip=\"Maximum threshold for heterogeneity checks\",\n disabled=False,\n layout=Layout(width='150px')\n )\n pre_heto_chec_box = HBox([pre_min_het, pre_max_het])\n pre_min_cluster_size = IntText(\n value=20,\n description='pixels:',\n tooltip=\"Minimum area for clusters selection.\",\n disabled=False,\n layout=Layout(width='200px')\n )\n pre_pixel_connectivity = IntText(\n value=8,\n description='connectivity type:',\n tooltip=\"Type of pixel connectivity in analysis. Accepted values: 4 or 8.\",\n disabled=False,\n layout=Layout(width='200px')\n )\n pre_negative_buffer = IntText(\n value=-10,\n description='negative buffer:',\n tooltip=\"Negative buffer to be applied on the FOI\",\n disabled=False,\n layout=Layout(width='200px')\n )\n\n pre_box = VBox([\n pre_info, pre_heto_chec, pre_heto_chec_box,\n pre_pixel_connectivity, pre_negative_buffer,\n HBox([pre_min_cluster_size,\n HTML(\"Minimum area for clusters selection - only clusters bigger from this threshold will be counted.\")])\n ])\n\n # Run FOI analysis\n run_info = Label(\"5. 
Run the FOI analysis.\")\n run_analysis = Button(\n description='Run FOI v2',\n value=False,\n disabled=False,\n button_style='info',\n tooltip='Run FOI analysis version 2',\n icon='play',\n )\n run_box = HBox([run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v2.main(\n f\"{path_foi}vector/{shp_file.children[1].children[0].value}\",\n f\"{path_foi}raster/{img_file.children[1].children[0].value}\",\n f\"{path_foi}{yml_file.children[1].children[0].value}\",\n pre_negative_buffer.value,\n pre_min_het.value,\n pre_max_het.value,\n pre_pixel_connectivity.value,\n pre_min_cluster_size.value)\n\n wbox_v2 = VBox([foi_info,\n shp_box,\n img_box,\n yml_box,\n pre_box,\n run_info,\n run_box,\n progress])\n\n return wbox_v2\n",
"import os\nimport glob\nfrom ipywidgets import Text, Label, HBox, VBox, Layout, Dropdown, ToggleButtons, Output, HTML, Button, FileUpload, IntText, RadioButtons\nfrom cbm.utils import config\nfrom cbm.ipycbm.utils import settings_ds, cbm_widgets\nfrom cbm.ipycbm.ipy_ext import ext_func\nfrom cbm.foi import foi_v1\nfrom cbm.datas import db\ntry:\n from cbm.foi import foi_v2\nexcept Exception as err:\n print(err)\n\n\ndef foi_tab_v1():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n path_foi_func = foi_v1.path_foi_func\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n foi_info = HTML(\n 'FOI procedures version 1 (requires access to a database).\\n ',\n placeholder='FOI Information')\n config_info = HTML(value=\n \"\"\"1. Connect to database and object storage.<br>\n FOI procedures need direct access to the database. In case there no\n image is provided, access to object storage will be needed as well\n to generate the base image from sentinel images.\n \"\"\"\n , placeholder='FOI Information')\n config_conn = Button(value=False, button_style='info', tooltip=\n 'Configure db connection.', icon='cogs', layout=Layout(width='40px'))\n config_conn_box = HBox([])\n\n @config_conn.on_click\n def config_conn_on_click(b):\n if config_conn_box.children == ():\n config_conn_box.children = [settings_ds.direct_conn()]\n else:\n config_conn_box.children = ()\n config_box = VBox([config_info, config_conn, config_conn_box])\n spatial_info = HTML(\n \"\"\"2. 
Select the spatial data to be tested - parcels that will be\n checked for heterogeneity and cardinality.<br>\n - Select a table from the database\"\"\"\n )\n db_tables = Dropdown(options=[], description='db Tables:')\n refresh_db_tables = Button(value=False, button_style='info', tooltip=\n 'Get db tables.', icon='refresh', layout=Layout(width='40px'))\n\n @refresh_db_tables.on_click\n def refresh_db_tables_on_click(b):\n db_tables.options = db.tables(config.get_value(['set', 'db_conn']))\n db_tables_box = HBox([db_tables, refresh_db_tables])\n upload_shp = Button(description='Create new table', value=False,\n button_style='info', tooltip='upload_shp.', icon='up')\n upload_box = VBox([])\n\n @upload_shp.on_click\n def upload_shp_on_click(b):\n if upload_box.children == ():\n upload_box.children = [ext_func.upload_shp(path_foi, True)]\n else:\n upload_box.children = ()\n spatial_box = VBox([spatial_info, upload_shp, upload_box, db_tables_box])\n img_info = HTML(\n \"\"\"3. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\"\n )\n img_option = ToggleButtons(options=['Upload', 'Generate'], value=None,\n disabled=True, button_style='info', tooltips=[\n 'Upnload your base image', 'Get from object storage'])\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(f'{path_foi}raster',\n '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n yml_info = HTML(\n \"\"\"4. 
YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\"\n )\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n dbf_info = HTML(\n \"\"\"5. Create database functions.<br>\n - Import required database functions for FOI analysis to the database\"\"\"\n )\n dbf_insert = Button(value=False, button_style='info', tooltip=\n 'Create functions.', icon='fa-share-square')\n\n @dbf_insert.on_click\n def dbf_insert_on_click(b):\n outlog('path_foi_func :', path_foi_func)\n progress.clear_output()\n try:\n functions = glob.glob(f'{path_foi_func}*.func')\n db = config.get_value(['set', 'db_conn'])\n sche = config.get_value(['db', db, 'sche'])\n user = config.get_value(['db', db, 'user'])\n for f in functions:\n db.insert_function(open(f).read().format(schema=sche, owner\n =user))\n outlog(f\"The '{f}' Was imported to the database.\")\n finc_list = [f\"ipycbm_{f.split('/')[-1].split('.')[0]}, \" for f in\n functions]\n outlog(\n f\"The functions: {''.join(finc_list)} where added to the database\"\n )\n except Exception as err:\n outlog('Could not add functions to dattabase.', err)\n dbf_box = VBox([dbf_info, dbf_insert])\n param_info = HTML('6. Set FOI v1 Parameters')\n param_heto_info = HTML(\n \"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\"\n )\n param_min_het = IntText(value=30, description='MIN:', tooltip=\n 'Minimum threshold for heterogeneity checks', layout=Layout(width=\n '150px'))\n param_max_het = IntText(value=70, description='MAX:', tooltip=\n 'Maximum threshold for heterogeneity checks', layout=Layout(width=\n '150px'))\n param_area_info = HTML(\n \"\"\"Minimum area for clusters selection -\n only clusters bigger from this threshold will be counted.\n \"\"\"\n )\n param_area = IntText(value=2000, description='area:', tooltip=\n 'Minimum area for clusters selection.', layout=Layout(width='200px'))\n param_box = VBox([param_info, param_heto_info, HBox([param_min_het,\n param_max_het]), param_area_info, param_area])\n run_info = Label('7. Run the FOI analysis.')\n run_analysis = Button(description='Run FOI v1', value=False,\n button_style='info', tooltip='Run FOI analysis version 1', icon='play')\n run_box = VBox([run_info, run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v1.main(db_tables.value,\n f'{path_foi}raster/{img_file.children[1].children[0].value}',\n f'{path_foi}{yml_file.children[1].children[0].value}',\n param_min_het.value, param_max_het.value, param_area.value)\n wbox = VBox([foi_info, config_box, spatial_box, img_box, yml_box,\n dbf_box, param_box, run_box, progress])\n return wbox\n\n\ndef foi_tab_v2():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n foi_info = HTML(\n 'FOI procedures version 2 (does not require access to a database).\\n '\n , placeholder='FOI Information')\n shp_info = HTML(\n \"\"\"1. 
Spatial data to be tested -\n parcels that will be checked for heterogeneity and cardinality.\"\"\"\n )\n shp_file = cbm_widgets.get_files_dropdown(f'{path_foi}vector', '',\n 'Select .shp', True, True)\n shp_box = VBox([shp_info, shp_file])\n img_info = HTML(\n \"\"\"2. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\"\n )\n img_option = ToggleButtons(options=['Upload', 'Generate'], value=None,\n disabled=True, button_style='', tooltips=['Upnload your base image',\n 'Get from object storage'])\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(f'{path_foi}raster',\n '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n yml_info = HTML(\n \"\"\"3. YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\"\n )\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n pre_info = Label('4. Set FOI v2 Parameters.')\n pre_heto_chec = HTML(\n \"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\"\n )\n pre_min_het = IntText(value=30, description='MIN:', tooltip=\n 'Minimum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_max_het = IntText(value=70, description='MAX:', tooltip=\n 'Maximum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_heto_chec_box = HBox([pre_min_het, pre_max_het])\n pre_min_cluster_size = IntText(value=20, description='pixels:', tooltip\n ='Minimum area for clusters selection.', disabled=False, layout=\n Layout(width='200px'))\n pre_pixel_connectivity = IntText(value=8, description=\n 'connectivity type:', tooltip=\n 'Type of pixel connectivity in analysis. Accepted values: 4 or 8.',\n disabled=False, layout=Layout(width='200px'))\n pre_negative_buffer = IntText(value=-10, description='negative buffer:',\n tooltip='Negative buffer to be applied on the FOI', disabled=False,\n layout=Layout(width='200px'))\n pre_box = VBox([pre_info, pre_heto_chec, pre_heto_chec_box,\n pre_pixel_connectivity, pre_negative_buffer, HBox([\n pre_min_cluster_size, HTML(\n 'Minimum area for clusters selection - only clusters bigger from this threshold will be counted.'\n )])])\n run_info = Label('5. 
Run the FOI analysis.')\n run_analysis = Button(description='Run FOI v2', value=False, disabled=\n False, button_style='info', tooltip='Run FOI analysis version 2',\n icon='play')\n run_box = HBox([run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v2.main(\n f'{path_foi}vector/{shp_file.children[1].children[0].value}',\n f'{path_foi}raster/{img_file.children[1].children[0].value}',\n f'{path_foi}{yml_file.children[1].children[0].value}',\n pre_negative_buffer.value, pre_min_het.value, pre_max_het.\n value, pre_pixel_connectivity.value, pre_min_cluster_size.value\n )\n wbox_v2 = VBox([foi_info, shp_box, img_box, yml_box, pre_box, run_info,\n run_box, progress])\n return wbox_v2\n",
"<import token>\ntry:\n from cbm.foi import foi_v2\nexcept Exception as err:\n print(err)\n\n\ndef foi_tab_v1():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n path_foi_func = foi_v1.path_foi_func\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n foi_info = HTML(\n 'FOI procedures version 1 (requires access to a database).\\n ',\n placeholder='FOI Information')\n config_info = HTML(value=\n \"\"\"1. Connect to database and object storage.<br>\n FOI procedures need direct access to the database. In case there no\n image is provided, access to object storage will be needed as well\n to generate the base image from sentinel images.\n \"\"\"\n , placeholder='FOI Information')\n config_conn = Button(value=False, button_style='info', tooltip=\n 'Configure db connection.', icon='cogs', layout=Layout(width='40px'))\n config_conn_box = HBox([])\n\n @config_conn.on_click\n def config_conn_on_click(b):\n if config_conn_box.children == ():\n config_conn_box.children = [settings_ds.direct_conn()]\n else:\n config_conn_box.children = ()\n config_box = VBox([config_info, config_conn, config_conn_box])\n spatial_info = HTML(\n \"\"\"2. 
Select the spatial data to be tested - parcels that will be\n checked for heterogeneity and cardinality.<br>\n - Select a table from the database\"\"\"\n )\n db_tables = Dropdown(options=[], description='db Tables:')\n refresh_db_tables = Button(value=False, button_style='info', tooltip=\n 'Get db tables.', icon='refresh', layout=Layout(width='40px'))\n\n @refresh_db_tables.on_click\n def refresh_db_tables_on_click(b):\n db_tables.options = db.tables(config.get_value(['set', 'db_conn']))\n db_tables_box = HBox([db_tables, refresh_db_tables])\n upload_shp = Button(description='Create new table', value=False,\n button_style='info', tooltip='upload_shp.', icon='up')\n upload_box = VBox([])\n\n @upload_shp.on_click\n def upload_shp_on_click(b):\n if upload_box.children == ():\n upload_box.children = [ext_func.upload_shp(path_foi, True)]\n else:\n upload_box.children = ()\n spatial_box = VBox([spatial_info, upload_shp, upload_box, db_tables_box])\n img_info = HTML(\n \"\"\"3. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\"\n )\n img_option = ToggleButtons(options=['Upload', 'Generate'], value=None,\n disabled=True, button_style='info', tooltips=[\n 'Upnload your base image', 'Get from object storage'])\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(f'{path_foi}raster',\n '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n yml_info = HTML(\n \"\"\"4. 
YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\"\n )\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n dbf_info = HTML(\n \"\"\"5. Create database functions.<br>\n - Import required database functions for FOI analysis to the database\"\"\"\n )\n dbf_insert = Button(value=False, button_style='info', tooltip=\n 'Create functions.', icon='fa-share-square')\n\n @dbf_insert.on_click\n def dbf_insert_on_click(b):\n outlog('path_foi_func :', path_foi_func)\n progress.clear_output()\n try:\n functions = glob.glob(f'{path_foi_func}*.func')\n db = config.get_value(['set', 'db_conn'])\n sche = config.get_value(['db', db, 'sche'])\n user = config.get_value(['db', db, 'user'])\n for f in functions:\n db.insert_function(open(f).read().format(schema=sche, owner\n =user))\n outlog(f\"The '{f}' Was imported to the database.\")\n finc_list = [f\"ipycbm_{f.split('/')[-1].split('.')[0]}, \" for f in\n functions]\n outlog(\n f\"The functions: {''.join(finc_list)} where added to the database\"\n )\n except Exception as err:\n outlog('Could not add functions to dattabase.', err)\n dbf_box = VBox([dbf_info, dbf_insert])\n param_info = HTML('6. Set FOI v1 Parameters')\n param_heto_info = HTML(\n \"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\"\n )\n param_min_het = IntText(value=30, description='MIN:', tooltip=\n 'Minimum threshold for heterogeneity checks', layout=Layout(width=\n '150px'))\n param_max_het = IntText(value=70, description='MAX:', tooltip=\n 'Maximum threshold for heterogeneity checks', layout=Layout(width=\n '150px'))\n param_area_info = HTML(\n \"\"\"Minimum area for clusters selection -\n only clusters bigger from this threshold will be counted.\n \"\"\"\n )\n param_area = IntText(value=2000, description='area:', tooltip=\n 'Minimum area for clusters selection.', layout=Layout(width='200px'))\n param_box = VBox([param_info, param_heto_info, HBox([param_min_het,\n param_max_het]), param_area_info, param_area])\n run_info = Label('7. Run the FOI analysis.')\n run_analysis = Button(description='Run FOI v1', value=False,\n button_style='info', tooltip='Run FOI analysis version 1', icon='play')\n run_box = VBox([run_info, run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v1.main(db_tables.value,\n f'{path_foi}raster/{img_file.children[1].children[0].value}',\n f'{path_foi}{yml_file.children[1].children[0].value}',\n param_min_het.value, param_max_het.value, param_area.value)\n wbox = VBox([foi_info, config_box, spatial_box, img_box, yml_box,\n dbf_box, param_box, run_box, progress])\n return wbox\n\n\ndef foi_tab_v2():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n foi_info = HTML(\n 'FOI procedures version 2 (does not require access to a database).\\n '\n , placeholder='FOI Information')\n shp_info = HTML(\n \"\"\"1. 
Spatial data to be tested -\n parcels that will be checked for heterogeneity and cardinality.\"\"\"\n )\n shp_file = cbm_widgets.get_files_dropdown(f'{path_foi}vector', '',\n 'Select .shp', True, True)\n shp_box = VBox([shp_info, shp_file])\n img_info = HTML(\n \"\"\"2. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\"\n )\n img_option = ToggleButtons(options=['Upload', 'Generate'], value=None,\n disabled=True, button_style='', tooltips=['Upnload your base image',\n 'Get from object storage'])\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(f'{path_foi}raster',\n '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n yml_info = HTML(\n \"\"\"3. YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\"\n )\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n pre_info = Label('4. Set FOI v2 Parameters.')\n pre_heto_chec = HTML(\n \"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\"\n )\n pre_min_het = IntText(value=30, description='MIN:', tooltip=\n 'Minimum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_max_het = IntText(value=70, description='MAX:', tooltip=\n 'Maximum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_heto_chec_box = HBox([pre_min_het, pre_max_het])\n pre_min_cluster_size = IntText(value=20, description='pixels:', tooltip\n ='Minimum area for clusters selection.', disabled=False, layout=\n Layout(width='200px'))\n pre_pixel_connectivity = IntText(value=8, description=\n 'connectivity type:', tooltip=\n 'Type of pixel connectivity in analysis. Accepted values: 4 or 8.',\n disabled=False, layout=Layout(width='200px'))\n pre_negative_buffer = IntText(value=-10, description='negative buffer:',\n tooltip='Negative buffer to be applied on the FOI', disabled=False,\n layout=Layout(width='200px'))\n pre_box = VBox([pre_info, pre_heto_chec, pre_heto_chec_box,\n pre_pixel_connectivity, pre_negative_buffer, HBox([\n pre_min_cluster_size, HTML(\n 'Minimum area for clusters selection - only clusters bigger from this threshold will be counted.'\n )])])\n run_info = Label('5. 
Run the FOI analysis.')\n run_analysis = Button(description='Run FOI v2', value=False, disabled=\n False, button_style='info', tooltip='Run FOI analysis version 2',\n icon='play')\n run_box = HBox([run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v2.main(\n f'{path_foi}vector/{shp_file.children[1].children[0].value}',\n f'{path_foi}raster/{img_file.children[1].children[0].value}',\n f'{path_foi}{yml_file.children[1].children[0].value}',\n pre_negative_buffer.value, pre_min_het.value, pre_max_het.\n value, pre_pixel_connectivity.value, pre_min_cluster_size.value\n )\n wbox_v2 = VBox([foi_info, shp_box, img_box, yml_box, pre_box, run_info,\n run_box, progress])\n return wbox_v2\n",
"<import token>\n<code token>\n\n\ndef foi_tab_v1():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n path_foi_func = foi_v1.path_foi_func\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n foi_info = HTML(\n 'FOI procedures version 1 (requires access to a database).\\n ',\n placeholder='FOI Information')\n config_info = HTML(value=\n \"\"\"1. Connect to database and object storage.<br>\n FOI procedures need direct access to the database. In case there no\n image is provided, access to object storage will be needed as well\n to generate the base image from sentinel images.\n \"\"\"\n , placeholder='FOI Information')\n config_conn = Button(value=False, button_style='info', tooltip=\n 'Configure db connection.', icon='cogs', layout=Layout(width='40px'))\n config_conn_box = HBox([])\n\n @config_conn.on_click\n def config_conn_on_click(b):\n if config_conn_box.children == ():\n config_conn_box.children = [settings_ds.direct_conn()]\n else:\n config_conn_box.children = ()\n config_box = VBox([config_info, config_conn, config_conn_box])\n spatial_info = HTML(\n \"\"\"2. 
Select the spatial data to be tested - parcels that will be\n checked for heterogeneity and cardinality.<br>\n - Select a table from the database\"\"\"\n )\n db_tables = Dropdown(options=[], description='db Tables:')\n refresh_db_tables = Button(value=False, button_style='info', tooltip=\n 'Get db tables.', icon='refresh', layout=Layout(width='40px'))\n\n @refresh_db_tables.on_click\n def refresh_db_tables_on_click(b):\n db_tables.options = db.tables(config.get_value(['set', 'db_conn']))\n db_tables_box = HBox([db_tables, refresh_db_tables])\n upload_shp = Button(description='Create new table', value=False,\n button_style='info', tooltip='upload_shp.', icon='up')\n upload_box = VBox([])\n\n @upload_shp.on_click\n def upload_shp_on_click(b):\n if upload_box.children == ():\n upload_box.children = [ext_func.upload_shp(path_foi, True)]\n else:\n upload_box.children = ()\n spatial_box = VBox([spatial_info, upload_shp, upload_box, db_tables_box])\n img_info = HTML(\n \"\"\"3. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\"\n )\n img_option = ToggleButtons(options=['Upload', 'Generate'], value=None,\n disabled=True, button_style='info', tooltips=[\n 'Upnload your base image', 'Get from object storage'])\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(f'{path_foi}raster',\n '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n yml_info = HTML(\n \"\"\"4. 
YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\"\n )\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n dbf_info = HTML(\n \"\"\"5. Create database functions.<br>\n - Import required database functions for FOI analysis to the database\"\"\"\n )\n dbf_insert = Button(value=False, button_style='info', tooltip=\n 'Create functions.', icon='fa-share-square')\n\n @dbf_insert.on_click\n def dbf_insert_on_click(b):\n outlog('path_foi_func :', path_foi_func)\n progress.clear_output()\n try:\n functions = glob.glob(f'{path_foi_func}*.func')\n db = config.get_value(['set', 'db_conn'])\n sche = config.get_value(['db', db, 'sche'])\n user = config.get_value(['db', db, 'user'])\n for f in functions:\n db.insert_function(open(f).read().format(schema=sche, owner\n =user))\n outlog(f\"The '{f}' Was imported to the database.\")\n finc_list = [f\"ipycbm_{f.split('/')[-1].split('.')[0]}, \" for f in\n functions]\n outlog(\n f\"The functions: {''.join(finc_list)} where added to the database\"\n )\n except Exception as err:\n outlog('Could not add functions to dattabase.', err)\n dbf_box = VBox([dbf_info, dbf_insert])\n param_info = HTML('6. Set FOI v1 Parameters')\n param_heto_info = HTML(\n \"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\"\n )\n param_min_het = IntText(value=30, description='MIN:', tooltip=\n 'Minimum threshold for heterogeneity checks', layout=Layout(width=\n '150px'))\n param_max_het = IntText(value=70, description='MAX:', tooltip=\n 'Maximum threshold for heterogeneity checks', layout=Layout(width=\n '150px'))\n param_area_info = HTML(\n \"\"\"Minimum area for clusters selection -\n only clusters bigger from this threshold will be counted.\n \"\"\"\n )\n param_area = IntText(value=2000, description='area:', tooltip=\n 'Minimum area for clusters selection.', layout=Layout(width='200px'))\n param_box = VBox([param_info, param_heto_info, HBox([param_min_het,\n param_max_het]), param_area_info, param_area])\n run_info = Label('7. Run the FOI analysis.')\n run_analysis = Button(description='Run FOI v1', value=False,\n button_style='info', tooltip='Run FOI analysis version 1', icon='play')\n run_box = VBox([run_info, run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v1.main(db_tables.value,\n f'{path_foi}raster/{img_file.children[1].children[0].value}',\n f'{path_foi}{yml_file.children[1].children[0].value}',\n param_min_het.value, param_max_het.value, param_area.value)\n wbox = VBox([foi_info, config_box, spatial_box, img_box, yml_box,\n dbf_box, param_box, run_box, progress])\n return wbox\n\n\ndef foi_tab_v2():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n foi_info = HTML(\n 'FOI procedures version 2 (does not require access to a database).\\n '\n , placeholder='FOI Information')\n shp_info = HTML(\n \"\"\"1. 
Spatial data to be tested -\n parcels that will be checked for heterogeneity and cardinality.\"\"\"\n )\n shp_file = cbm_widgets.get_files_dropdown(f'{path_foi}vector', '',\n 'Select .shp', True, True)\n shp_box = VBox([shp_info, shp_file])\n img_info = HTML(\n \"\"\"2. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\"\n )\n img_option = ToggleButtons(options=['Upload', 'Generate'], value=None,\n disabled=True, button_style='', tooltips=['Upnload your base image',\n 'Get from object storage'])\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(f'{path_foi}raster',\n '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n yml_info = HTML(\n \"\"\"3. YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\"\n )\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n pre_info = Label('4. Set FOI v2 Parameters.')\n pre_heto_chec = HTML(\n \"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\"\n )\n pre_min_het = IntText(value=30, description='MIN:', tooltip=\n 'Minimum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_max_het = IntText(value=70, description='MAX:', tooltip=\n 'Maximum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_heto_chec_box = HBox([pre_min_het, pre_max_het])\n pre_min_cluster_size = IntText(value=20, description='pixels:', tooltip\n ='Minimum area for clusters selection.', disabled=False, layout=\n Layout(width='200px'))\n pre_pixel_connectivity = IntText(value=8, description=\n 'connectivity type:', tooltip=\n 'Type of pixel connectivity in analysis. Accepted values: 4 or 8.',\n disabled=False, layout=Layout(width='200px'))\n pre_negative_buffer = IntText(value=-10, description='negative buffer:',\n tooltip='Negative buffer to be applied on the FOI', disabled=False,\n layout=Layout(width='200px'))\n pre_box = VBox([pre_info, pre_heto_chec, pre_heto_chec_box,\n pre_pixel_connectivity, pre_negative_buffer, HBox([\n pre_min_cluster_size, HTML(\n 'Minimum area for clusters selection - only clusters bigger from this threshold will be counted.'\n )])])\n run_info = Label('5. 
Run the FOI analysis.')\n run_analysis = Button(description='Run FOI v2', value=False, disabled=\n False, button_style='info', tooltip='Run FOI analysis version 2',\n icon='play')\n run_box = HBox([run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v2.main(\n f'{path_foi}vector/{shp_file.children[1].children[0].value}',\n f'{path_foi}raster/{img_file.children[1].children[0].value}',\n f'{path_foi}{yml_file.children[1].children[0].value}',\n pre_negative_buffer.value, pre_min_het.value, pre_max_het.\n value, pre_pixel_connectivity.value, pre_min_cluster_size.value\n )\n wbox_v2 = VBox([foi_info, shp_box, img_box, yml_box, pre_box, run_info,\n run_box, progress])\n return wbox_v2\n",
"<import token>\n<code token>\n<function token>\n\n\ndef foi_tab_v2():\n path_foi = f\"{config.get_value(['paths', 'temp'])}/foi/\"\n progress = Output()\n\n def outlog(*text):\n with progress:\n print(*text)\n foi_info = HTML(\n 'FOI procedures version 2 (does not require access to a database).\\n '\n , placeholder='FOI Information')\n shp_info = HTML(\n \"\"\"1. Spatial data to be tested -\n parcels that will be checked for heterogeneity and cardinality.\"\"\"\n )\n shp_file = cbm_widgets.get_files_dropdown(f'{path_foi}vector', '',\n 'Select .shp', True, True)\n shp_box = VBox([shp_info, shp_file])\n img_info = HTML(\n \"\"\"2. Thematic raster - classification raster, or raster from other\n source that will be used for testing heterogeneity and cardinality.<br>\n - Upload or generate raster base image.\n (Only upload is currently available)\"\"\"\n )\n img_option = ToggleButtons(options=['Upload', 'Generate'], value=None,\n disabled=True, button_style='', tooltips=['Upnload your base image',\n 'Get from object storage'])\n\n def on_img_option_change(change):\n if img_option.value == 'Upload':\n img_box.children = [HBox([img_info, img_option, img_file])]\n else:\n img_box.children = ()\n img_option.observe(on_img_option_change, 'value')\n img_file = cbm_widgets.get_files_dropdown(f'{path_foi}raster',\n '.tif, .tiff', 'Select Raster')\n img_box = VBox([img_info, img_option, img_file])\n yml_info = HTML(\n \"\"\"3. YAML file that holds the classes form the thematic raster.<br>\n - This can be also a simple list of values in the notebook\n corespondence between pixel values and names for the classes\"\"\"\n )\n yml_file = cbm_widgets.get_files_dropdown(path_foi, '.yml, .yaml',\n 'Select YML')\n yml_box = VBox([yml_info, yml_file])\n pre_info = Label('4. Set FOI v2 Parameters.')\n pre_heto_chec = HTML(\n \"\"\"\n Minimum and maximum thresholds for heterogeneity checks. 
In the example,\n any parcel with percentage of pixels for one class between 30 and 70 from\n the total, will be considered heterogenous.\n \"\"\"\n )\n pre_min_het = IntText(value=30, description='MIN:', tooltip=\n 'Minimum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_max_het = IntText(value=70, description='MAX:', tooltip=\n 'Maximum threshold for heterogeneity checks', disabled=False,\n layout=Layout(width='150px'))\n pre_heto_chec_box = HBox([pre_min_het, pre_max_het])\n pre_min_cluster_size = IntText(value=20, description='pixels:', tooltip\n ='Minimum area for clusters selection.', disabled=False, layout=\n Layout(width='200px'))\n pre_pixel_connectivity = IntText(value=8, description=\n 'connectivity type:', tooltip=\n 'Type of pixel connectivity in analysis. Accepted values: 4 or 8.',\n disabled=False, layout=Layout(width='200px'))\n pre_negative_buffer = IntText(value=-10, description='negative buffer:',\n tooltip='Negative buffer to be applied on the FOI', disabled=False,\n layout=Layout(width='200px'))\n pre_box = VBox([pre_info, pre_heto_chec, pre_heto_chec_box,\n pre_pixel_connectivity, pre_negative_buffer, HBox([\n pre_min_cluster_size, HTML(\n 'Minimum area for clusters selection - only clusters bigger from this threshold will be counted.'\n )])])\n run_info = Label('5. 
Run the FOI analysis.')\n run_analysis = Button(description='Run FOI v2', value=False, disabled=\n False, button_style='info', tooltip='Run FOI analysis version 2',\n icon='play')\n run_box = HBox([run_analysis])\n\n @run_analysis.on_click\n def run_analysis_on_click(b):\n with progress:\n foi_v2.main(\n f'{path_foi}vector/{shp_file.children[1].children[0].value}',\n f'{path_foi}raster/{img_file.children[1].children[0].value}',\n f'{path_foi}{yml_file.children[1].children[0].value}',\n pre_negative_buffer.value, pre_min_het.value, pre_max_het.\n value, pre_pixel_connectivity.value, pre_min_cluster_size.value\n )\n wbox_v2 = VBox([foi_info, shp_box, img_box, yml_box, pre_box, run_info,\n run_box, progress])\n return wbox_v2\n",
"<import token>\n<code token>\n<function token>\n<function token>\n"
] | false |
9,780 |
c2f859e0ed0e812768dec04b2b1f9ddd349350f6
|
# open a converted base to bits file and convert it back to the base sequences
seq2 = ''
with open('chr01.txt') as a:
while 1:
seq = a.read(2)
# print(seq)
seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a').replace('11', 't')
seq2 += seq
if not seq:
break
print(len(seq2))
print(seq2)
|
[
"# open a converted base to bits file and convert it back to the base sequences\n\nseq2 = ''\nwith open('chr01.txt') as a:\n while 1:\n seq = a.read(2)\n # print(seq)\n seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a').replace('11', 't')\n seq2 += seq\n if not seq:\n break\n\nprint(len(seq2))\nprint(seq2)\n",
"seq2 = ''\nwith open('chr01.txt') as a:\n while 1:\n seq = a.read(2)\n seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a'\n ).replace('11', 't')\n seq2 += seq\n if not seq:\n break\nprint(len(seq2))\nprint(seq2)\n",
"<assignment token>\nwith open('chr01.txt') as a:\n while 1:\n seq = a.read(2)\n seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a'\n ).replace('11', 't')\n seq2 += seq\n if not seq:\n break\nprint(len(seq2))\nprint(seq2)\n",
"<assignment token>\n<code token>\n"
] | false |
9,781 |
451a36eb205a269a05e3b3d89541278633d12aaa
|
class ChartType:
Vanilla = "Vanilla"
Neopolitan = "Neopolitan"
|
[
"\n\nclass ChartType:\n Vanilla = \"Vanilla\"\n Neopolitan = \"Neopolitan\"\n",
"class ChartType:\n Vanilla = 'Vanilla'\n Neopolitan = 'Neopolitan'\n",
"class ChartType:\n <assignment token>\n <assignment token>\n",
"<class token>\n"
] | false |
9,782 |
4ed6f4db4c9c3319d6289ba402f81bbd8accf915
|
import numpy as np
import dxchange
import ptychotomo
if __name__ == "__main__":
# read object
u = dxchange.read_tiff('data/init_object.tiff')
u = u+1j*u/2
nz, n, _ = u.shape
# parameters
center = n/2
ntheta = 384
ne = 3*n//2
ngpus = 1
pnz = nz//2
theta = np.linspace(0, 4*np.pi, ntheta).astype('float32')
# simulate data
with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus) as tslv:
data = tslv.fwd_tomo_batch(u)
# adjoint test with data padding
with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center+(ne-n)/2, ngpus) as tslv:
data = ptychotomo.utils.paddata(data, ne)
ua = tslv.adj_tomo_batch(data)
ua = ptychotomo.utils.unpadobject(ua, n)
print(f'norm data = {np.linalg.norm(data)}')
print(f'norm object = {np.linalg.norm(ua)}')
print(
f'<u,R*Ru>=<Ru,Ru>: {np.sum(u*np.conj(ua)):e} ? {np.sum(data*np.conj(data)):e}')
|
[
"import numpy as np\nimport dxchange\nimport ptychotomo\n\nif __name__ == \"__main__\":\n \n # read object\n u = dxchange.read_tiff('data/init_object.tiff')\n u = u+1j*u/2\n\n nz, n, _ = u.shape\n\n # parameters\n center = n/2\n ntheta = 384\n ne = 3*n//2\n ngpus = 1\n pnz = nz//2\n theta = np.linspace(0, 4*np.pi, ntheta).astype('float32')\n\n # simulate data\n with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus) as tslv:\n data = tslv.fwd_tomo_batch(u)\n\n # adjoint test with data padding\n with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center+(ne-n)/2, ngpus) as tslv:\n data = ptychotomo.utils.paddata(data, ne)\n ua = tslv.adj_tomo_batch(data)\n ua = ptychotomo.utils.unpadobject(ua, n)\n\n print(f'norm data = {np.linalg.norm(data)}')\n print(f'norm object = {np.linalg.norm(ua)}')\n print(\n f'<u,R*Ru>=<Ru,Ru>: {np.sum(u*np.conj(ua)):e} ? {np.sum(data*np.conj(data)):e}')\n",
"import numpy as np\nimport dxchange\nimport ptychotomo\nif __name__ == '__main__':\n u = dxchange.read_tiff('data/init_object.tiff')\n u = u + 1.0j * u / 2\n nz, n, _ = u.shape\n center = n / 2\n ntheta = 384\n ne = 3 * n // 2\n ngpus = 1\n pnz = nz // 2\n theta = np.linspace(0, 4 * np.pi, ntheta).astype('float32')\n with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus\n ) as tslv:\n data = tslv.fwd_tomo_batch(u)\n with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center + (ne - n\n ) / 2, ngpus) as tslv:\n data = ptychotomo.utils.paddata(data, ne)\n ua = tslv.adj_tomo_batch(data)\n ua = ptychotomo.utils.unpadobject(ua, n)\n print(f'norm data = {np.linalg.norm(data)}')\n print(f'norm object = {np.linalg.norm(ua)}')\n print(\n f'<u,R*Ru>=<Ru,Ru>: {np.sum(u * np.conj(ua)):e} ? {np.sum(data * np.conj(data)):e}'\n )\n",
"<import token>\nif __name__ == '__main__':\n u = dxchange.read_tiff('data/init_object.tiff')\n u = u + 1.0j * u / 2\n nz, n, _ = u.shape\n center = n / 2\n ntheta = 384\n ne = 3 * n // 2\n ngpus = 1\n pnz = nz // 2\n theta = np.linspace(0, 4 * np.pi, ntheta).astype('float32')\n with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus\n ) as tslv:\n data = tslv.fwd_tomo_batch(u)\n with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center + (ne - n\n ) / 2, ngpus) as tslv:\n data = ptychotomo.utils.paddata(data, ne)\n ua = tslv.adj_tomo_batch(data)\n ua = ptychotomo.utils.unpadobject(ua, n)\n print(f'norm data = {np.linalg.norm(data)}')\n print(f'norm object = {np.linalg.norm(ua)}')\n print(\n f'<u,R*Ru>=<Ru,Ru>: {np.sum(u * np.conj(ua)):e} ? {np.sum(data * np.conj(data)):e}'\n )\n",
"<import token>\n<code token>\n"
] | false |
9,783 |
21c581131cff8cf2f4aa407055184d56865a6335
|
#!/usr/bin/env python
# Title : STACK_BostonHousing.py
# Description : Stacking was the natural progression of our algorithms trial.
# In here, we'll use prediction from a number of models in order
# to improve accuracy as it add linearly independent data to our
# dataset. Here we also use voting ensembler, using the best es-
# timator three timers on the stack of second level models.
# We'll find CV scores of each model on train_test_split then
# stack the models on a 5-KFold of the data, finding final CV
# score. We'll also plot the comparative graph of Real Prices vs
# Predicted Prices
# Author : Neves4
# Outputs : Figure with one plot : 'Real Prices vs Predicted prices'
# Values : SVR CV Scores: 0.6798 (+/- 0.0895)
# XGB CV Scores: 0.8784 (+/- 0.0598)
# RF CV Scores: 0.8601 (+/- 0.0789)
# STACK CV Scores: 0.8809 (+/- 0.0864)
# License : MIT License
#==============================================================================
##### IMPORTING #####
import numpy as np
import xgboost as xgb
from sklearn import datasets
import seaborn as sns
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import ElasticNet
from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
from sklearn.model_selection import cross_val_score, train_test_split, KFold
from sklearn.metrics import r2_score
sns.set() # set seaborn style
##### DECLARING AND TRAINING #####
# Carregamento do dataset do boston, conversão para o framework pandas e como a
# nomenclatura não é automática, foi dado valor às colunas da tabela do pandas.
# Para verificar como estão os dados, chamar print(boston_pd.head())
boston = datasets.load_boston()
boston_pd = pd.DataFrame(boston.data)
boston_pd.columns = boston.feature_names
# É necessária então a divisão dos datasets, pelo método train_test_split. Para
# encontrar o tamanho de cada tensor que foi dividido, print(X_train.shape)
X, Y = boston_pd, boston.target
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.1,
random_state = 42)
# ##### 1ST LEVEL MODELS #####
# # ElasticNet - baseline model #0
# print("------- FITTING ElasticNet -------")
# en_mdl = ElasticNet(alpha = 5.2, l1_ratio = 0.5, random_state = 42)
# en_cv_scores = cross_val_score(en_mdl, X_train, Y_train, cv=5, scoring='r2')
# print(" DONE! CV Scores: {:.4f} (+/- {:.4f})" .format(en_cv_scores.mean(),\
# en_cv_scores.std() * 2))
# SVR - baseline model #1
print("------- FITTING SVR -------")
svr_mdl = SVR(kernel = 'linear', C = 0.11, epsilon = 0.011, gamma = 0.1)
svr_cv_scores = cross_val_score(svr_mdl, X_train, Y_train, cv=5, scoring='r2')
print(" DONE! CV Scores: {:.4f} (+/- {:.4f})" .format(svr_cv_scores.mean(),\
svr_cv_scores.std() * 2))
# XGBRegressor - baseline model #2
print("------- FITTING XGBRegressor -------")
xgb_mdl = xgb.XGBRegressor(learning_rate = 0.0503, n_estimators = 339,
max_depth = 5, min_child_weight = 2, gamma = 0.17,
subsample = 0.84, colsample_bytree = 0.85,
reg_alpha = 0.008, reg_lambda = 1.2,
scale_pos_weight = 1, seed = 42)
xgb_cv_scores = cross_val_score(xgb_mdl, X_train, Y_train, cv=5, scoring='r2')
print(" DONE! CV Scores: {:.4f} (+/- {:.4f})" .format(xgb_cv_scores.mean(),\
xgb_cv_scores.std() * 2))
# RandomForestRegressor - baseline model #3
print("------- FITTING RandomForestRegressor -------")
rf_mdl = RandomForestRegressor(n_estimators = 95, max_features = 'auto',
max_depth = 18, min_samples_split = 2,
min_samples_leaf = 1, bootstrap = True,
random_state = 42)
rf_cv_scores = cross_val_score(rf_mdl, X_train, Y_train, cv=5, scoring='r2')
print(" DONE! CV Scores: {:.4f} (+/- {:.4f})" .format(rf_cv_scores.mean(),\
rf_cv_scores.std() * 2))
class Ensemble(object):
"""Ensemble base_models on train data than fit/predict
The object input is composed of 'n_splits', 'stacker' and list of
'base_models'.
The __init__ method self-assign the inputs.
The fit_predict method divides the dataset in 'n_splits' then it loops
trough ammount of 'base_models' fitting all splits and then averaging it on
a new column in the end. In the end, predictions are made with these new
columns.
If sought the use of voting ensemble, the ammount of models passed on
base_models can be repeated.
"""
def __init__(self, n_splits, stacker, base_models):
self.n_splits = n_splits
self.stacker = stacker
self.base_models = base_models
def fit_predict(self, X, Y, T):
X = np.array(X)
Y = np.array(Y)
T = np.array(T)
# Create folds on the dataset based on n_splits
folds = list(KFold(n_splits = self.n_splits, shuffle = True,
random_state = 42).split(X, Y))
S_train = np.zeros((X.shape[0], len(self.base_models)))
S_test = np.zeros((T.shape[0], len(self.base_models)))
# Loop trough base_models
print("------- FITTING Stacker - 2nd level -------")
for i, clf in enumerate(self.base_models):
# Create a dummy to calculate predictions on all folds
S_test_i = np.zeros((T.shape[0], self.n_splits))
# Loop trough data folds
for j, (train_idx, test_idx) in enumerate(folds):
X_train = X[train_idx]
Y_train = Y[train_idx]
X_holdout = X[test_idx]
Y_holdout = Y[test_idx]
clf.fit(X_train, Y_train)
Y_pred = clf.predict(X_holdout)[:]
print (" Model {}, fold {}. R^2 score: {:.4f}"\
.format(i, j, r2_score(Y_holdout, Y_pred)))
S_train[test_idx, i] = Y_pred
S_test_i[:, j] = clf.predict(T)[:]
# Update test data with average of predictions from the dummy
S_test[:, i] = S_test_i.mean(axis = 1)
# Print final CV score
results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')
print("\033[1;92mDONE! \033[0;0m\033[1;37mCV scores: {:.4f} (+/- {:.4f})"
.format(results.mean(), results.std() * 2))
# After creating new features on the test data, fit the chosen stacker
# on train data and finally predict on test data, then return
self.stacker.fit(S_train, Y)
final_prediction = self.stacker.predict(S_test)[:]
return final_prediction
stack = Ensemble(n_splits = 5, stacker = svr_mdl,
base_models = (xgb_mdl, rf_mdl, xgb_mdl, svr_mdl, xgb_mdl))
stack_pred = stack.fit_predict(X_train, Y_train, X_test)
##### PLOTS #####
# Plot outputs using scatter. Ticks are diabled and everything else is the clea-
# nest that I could. Predicted prices vs Real Prices
custom_style = {'axes.labelcolor': 'white',
'xtick.color': 'white',
'ytick.color': 'white'}
data = pd.DataFrame(data = {'stack_pred': stack_pred, 'Y_test': Y_test})
ax = sns.lmplot(x='Y_test', y='stack_pred', data = data, truncate=True, size=5)
ax.set_axis_labels("Real prices", "Predicted prices")
plt.tick_params(axis='both', colors='gray')
plt.title("Real vs Predicted prices on Boston Housing", fontweight = 'bold')
plt.tight_layout()
plt.show()
|
[
"#!/usr/bin/env python\n# Title : STACK_BostonHousing.py\n# Description : Stacking was the natural progression of our algorithms trial.\n# In here, we'll use prediction from a number of models in order\n# to improve accuracy as it add linearly independent data to our\n# dataset. Here we also use voting ensembler, using the best es-\n# timator three timers on the stack of second level models.\n# We'll find CV scores of each model on train_test_split then\n# stack the models on a 5-KFold of the data, finding final CV\n# score. We'll also plot the comparative graph of Real Prices vs\n# Predicted Prices\n# Author : Neves4\n# Outputs : Figure with one plot : 'Real Prices vs Predicted prices'\n# Values : SVR CV Scores: 0.6798 (+/- 0.0895)\n# XGB CV Scores: 0.8784 (+/- 0.0598)\n# RF CV Scores: 0.8601 (+/- 0.0789)\n# STACK CV Scores: 0.8809 (+/- 0.0864)\n# License : MIT License\n#==============================================================================\n\n##### IMPORTING #####\nimport numpy as np\nimport xgboost as xgb\nfrom sklearn import datasets\nimport seaborn as sns\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom sklearn.linear_model import ElasticNet\nfrom sklearn.ensemble import RandomForestRegressor\nfrom sklearn.svm import SVR\nfrom sklearn.model_selection import cross_val_score, train_test_split, KFold\nfrom sklearn.metrics import r2_score\n\nsns.set() # set seaborn style\n\n##### DECLARING AND TRAINING #####\n# Carregamento do dataset do boston, conversão para o framework pandas e como a\n# nomenclatura não é automática, foi dado valor às colunas da tabela do pandas.\n# Para verificar como estão os dados, chamar print(boston_pd.head())\nboston = datasets.load_boston()\nboston_pd = pd.DataFrame(boston.data)\nboston_pd.columns = boston.feature_names\n\n# É necessária então a divisão dos datasets, pelo método train_test_split. 
Para\n# encontrar o tamanho de cada tensor que foi dividido, print(X_train.shape)\nX, Y = boston_pd, boston.target\nX_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.1,\n random_state = 42)\n\n# ##### 1ST LEVEL MODELS #####\n# # ElasticNet - baseline model #0\n# print(\"------- FITTING ElasticNet -------\")\n# en_mdl = ElasticNet(alpha = 5.2, l1_ratio = 0.5, random_state = 42)\n# en_cv_scores = cross_val_score(en_mdl, X_train, Y_train, cv=5, scoring='r2')\n# print(\" DONE! CV Scores: {:.4f} (+/- {:.4f})\" .format(en_cv_scores.mean(),\\\n# en_cv_scores.std() * 2))\n\n# SVR - baseline model #1\nprint(\"------- FITTING SVR -------\")\nsvr_mdl = SVR(kernel = 'linear', C = 0.11, epsilon = 0.011, gamma = 0.1)\nsvr_cv_scores = cross_val_score(svr_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(\" DONE! CV Scores: {:.4f} (+/- {:.4f})\" .format(svr_cv_scores.mean(),\\\n svr_cv_scores.std() * 2))\n\n# XGBRegressor - baseline model #2\nprint(\"------- FITTING XGBRegressor -------\")\nxgb_mdl = xgb.XGBRegressor(learning_rate = 0.0503, n_estimators = 339,\n max_depth = 5, min_child_weight = 2, gamma = 0.17,\n subsample = 0.84, colsample_bytree = 0.85,\n reg_alpha = 0.008, reg_lambda = 1.2,\n scale_pos_weight = 1, seed = 42)\nxgb_cv_scores = cross_val_score(xgb_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(\" DONE! CV Scores: {:.4f} (+/- {:.4f})\" .format(xgb_cv_scores.mean(),\\\n xgb_cv_scores.std() * 2))\n\n# RandomForestRegressor - baseline model #3\nprint(\"------- FITTING RandomForestRegressor -------\")\nrf_mdl = RandomForestRegressor(n_estimators = 95, max_features = 'auto',\n max_depth = 18, min_samples_split = 2,\n min_samples_leaf = 1, bootstrap = True,\n random_state = 42)\nrf_cv_scores = cross_val_score(rf_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(\" DONE! 
CV Scores: {:.4f} (+/- {:.4f})\" .format(rf_cv_scores.mean(),\\\n rf_cv_scores.std() * 2))\n\nclass Ensemble(object):\n \"\"\"Ensemble base_models on train data than fit/predict\n\n The object input is composed of 'n_splits', 'stacker' and list of\n 'base_models'.\n\n The __init__ method self-assign the inputs.\n\n The fit_predict method divides the dataset in 'n_splits' then it loops\n trough ammount of 'base_models' fitting all splits and then averaging it on\n a new column in the end. In the end, predictions are made with these new\n columns.\n\n If sought the use of voting ensemble, the ammount of models passed on\n base_models can be repeated.\n \"\"\"\n\n def __init__(self, n_splits, stacker, base_models):\n self.n_splits = n_splits\n self.stacker = stacker\n self.base_models = base_models\n\n def fit_predict(self, X, Y, T):\n X = np.array(X)\n Y = np.array(Y)\n T = np.array(T)\n\n # Create folds on the dataset based on n_splits\n folds = list(KFold(n_splits = self.n_splits, shuffle = True,\n random_state = 42).split(X, Y))\n\n S_train = np.zeros((X.shape[0], len(self.base_models)))\n S_test = np.zeros((T.shape[0], len(self.base_models)))\n\n # Loop trough base_models\n print(\"------- FITTING Stacker - 2nd level -------\")\n for i, clf in enumerate(self.base_models):\n\n # Create a dummy to calculate predictions on all folds\n S_test_i = np.zeros((T.shape[0], self.n_splits))\n\n # Loop trough data folds\n for j, (train_idx, test_idx) in enumerate(folds):\n X_train = X[train_idx]\n Y_train = Y[train_idx]\n X_holdout = X[test_idx]\n Y_holdout = Y[test_idx]\n\n clf.fit(X_train, Y_train)\n Y_pred = clf.predict(X_holdout)[:]\n\n print (\" Model {}, fold {}. 
R^2 score: {:.4f}\"\\\n .format(i, j, r2_score(Y_holdout, Y_pred)))\n\n S_train[test_idx, i] = Y_pred\n S_test_i[:, j] = clf.predict(T)[:]\n\n # Update test data with average of predictions from the dummy\n S_test[:, i] = S_test_i.mean(axis = 1)\n\n # Print final CV score\n results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')\n print(\"\\033[1;92mDONE! \\033[0;0m\\033[1;37mCV scores: {:.4f} (+/- {:.4f})\"\n .format(results.mean(), results.std() * 2))\n\n # After creating new features on the test data, fit the chosen stacker\n # on train data and finally predict on test data, then return\n self.stacker.fit(S_train, Y)\n final_prediction = self.stacker.predict(S_test)[:]\n\n return final_prediction\n\nstack = Ensemble(n_splits = 5, stacker = svr_mdl,\n base_models = (xgb_mdl, rf_mdl, xgb_mdl, svr_mdl, xgb_mdl))\n\nstack_pred = stack.fit_predict(X_train, Y_train, X_test)\n\n##### PLOTS #####\n# Plot outputs using scatter. Ticks are diabled and everything else is the clea-\n# nest that I could. Predicted prices vs Real Prices\ncustom_style = {'axes.labelcolor': 'white',\n 'xtick.color': 'white',\n 'ytick.color': 'white'}\ndata = pd.DataFrame(data = {'stack_pred': stack_pred, 'Y_test': Y_test})\nax = sns.lmplot(x='Y_test', y='stack_pred', data = data, truncate=True, size=5)\nax.set_axis_labels(\"Real prices\", \"Predicted prices\")\nplt.tick_params(axis='both', colors='gray')\nplt.title(\"Real vs Predicted prices on Boston Housing\", fontweight = 'bold')\nplt.tight_layout()\nplt.show()\n",
"import numpy as np\nimport xgboost as xgb\nfrom sklearn import datasets\nimport seaborn as sns\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom sklearn.linear_model import ElasticNet\nfrom sklearn.ensemble import RandomForestRegressor\nfrom sklearn.svm import SVR\nfrom sklearn.model_selection import cross_val_score, train_test_split, KFold\nfrom sklearn.metrics import r2_score\nsns.set()\nboston = datasets.load_boston()\nboston_pd = pd.DataFrame(boston.data)\nboston_pd.columns = boston.feature_names\nX, Y = boston_pd, boston.target\nX_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.1,\n random_state=42)\nprint('------- FITTING SVR -------')\nsvr_mdl = SVR(kernel='linear', C=0.11, epsilon=0.011, gamma=0.1)\nsvr_cv_scores = cross_val_score(svr_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(svr_cv_scores.mean(),\n svr_cv_scores.std() * 2))\nprint('------- FITTING XGBRegressor -------')\nxgb_mdl = xgb.XGBRegressor(learning_rate=0.0503, n_estimators=339,\n max_depth=5, min_child_weight=2, gamma=0.17, subsample=0.84,\n colsample_bytree=0.85, reg_alpha=0.008, reg_lambda=1.2,\n scale_pos_weight=1, seed=42)\nxgb_cv_scores = cross_val_score(xgb_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(xgb_cv_scores.mean(),\n xgb_cv_scores.std() * 2))\nprint('------- FITTING RandomForestRegressor -------')\nrf_mdl = RandomForestRegressor(n_estimators=95, max_features='auto',\n max_depth=18, min_samples_split=2, min_samples_leaf=1, bootstrap=True,\n random_state=42)\nrf_cv_scores = cross_val_score(rf_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(' DONE! 
CV Scores: {:.4f} (+/- {:.4f})'.format(rf_cv_scores.mean(), \n rf_cv_scores.std() * 2))\n\n\nclass Ensemble(object):\n \"\"\"Ensemble base_models on train data than fit/predict\n\n The object input is composed of 'n_splits', 'stacker' and list of\n 'base_models'.\n\n The __init__ method self-assign the inputs.\n\n The fit_predict method divides the dataset in 'n_splits' then it loops\n trough ammount of 'base_models' fitting all splits and then averaging it on\n a new column in the end. In the end, predictions are made with these new\n columns.\n\n If sought the use of voting ensemble, the ammount of models passed on\n base_models can be repeated.\n \"\"\"\n\n def __init__(self, n_splits, stacker, base_models):\n self.n_splits = n_splits\n self.stacker = stacker\n self.base_models = base_models\n\n def fit_predict(self, X, Y, T):\n X = np.array(X)\n Y = np.array(Y)\n T = np.array(T)\n folds = list(KFold(n_splits=self.n_splits, shuffle=True,\n random_state=42).split(X, Y))\n S_train = np.zeros((X.shape[0], len(self.base_models)))\n S_test = np.zeros((T.shape[0], len(self.base_models)))\n print('------- FITTING Stacker - 2nd level -------')\n for i, clf in enumerate(self.base_models):\n S_test_i = np.zeros((T.shape[0], self.n_splits))\n for j, (train_idx, test_idx) in enumerate(folds):\n X_train = X[train_idx]\n Y_train = Y[train_idx]\n X_holdout = X[test_idx]\n Y_holdout = Y[test_idx]\n clf.fit(X_train, Y_train)\n Y_pred = clf.predict(X_holdout)[:]\n print(' Model {}, fold {}. R^2 score: {:.4f}'.format(i, j,\n r2_score(Y_holdout, Y_pred)))\n S_train[test_idx, i] = Y_pred\n S_test_i[:, j] = clf.predict(T)[:]\n S_test[:, i] = S_test_i.mean(axis=1)\n results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')\n print(\n '\\x1b[1;92mDONE! 
\\x1b[0;0m\\x1b[1;37mCV scores: {:.4f} (+/- {:.4f})'\n .format(results.mean(), results.std() * 2))\n self.stacker.fit(S_train, Y)\n final_prediction = self.stacker.predict(S_test)[:]\n return final_prediction\n\n\nstack = Ensemble(n_splits=5, stacker=svr_mdl, base_models=(xgb_mdl, rf_mdl,\n xgb_mdl, svr_mdl, xgb_mdl))\nstack_pred = stack.fit_predict(X_train, Y_train, X_test)\ncustom_style = {'axes.labelcolor': 'white', 'xtick.color': 'white',\n 'ytick.color': 'white'}\ndata = pd.DataFrame(data={'stack_pred': stack_pred, 'Y_test': Y_test})\nax = sns.lmplot(x='Y_test', y='stack_pred', data=data, truncate=True, size=5)\nax.set_axis_labels('Real prices', 'Predicted prices')\nplt.tick_params(axis='both', colors='gray')\nplt.title('Real vs Predicted prices on Boston Housing', fontweight='bold')\nplt.tight_layout()\nplt.show()\n",
"<import token>\nsns.set()\nboston = datasets.load_boston()\nboston_pd = pd.DataFrame(boston.data)\nboston_pd.columns = boston.feature_names\nX, Y = boston_pd, boston.target\nX_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.1,\n random_state=42)\nprint('------- FITTING SVR -------')\nsvr_mdl = SVR(kernel='linear', C=0.11, epsilon=0.011, gamma=0.1)\nsvr_cv_scores = cross_val_score(svr_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(svr_cv_scores.mean(),\n svr_cv_scores.std() * 2))\nprint('------- FITTING XGBRegressor -------')\nxgb_mdl = xgb.XGBRegressor(learning_rate=0.0503, n_estimators=339,\n max_depth=5, min_child_weight=2, gamma=0.17, subsample=0.84,\n colsample_bytree=0.85, reg_alpha=0.008, reg_lambda=1.2,\n scale_pos_weight=1, seed=42)\nxgb_cv_scores = cross_val_score(xgb_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(xgb_cv_scores.mean(),\n xgb_cv_scores.std() * 2))\nprint('------- FITTING RandomForestRegressor -------')\nrf_mdl = RandomForestRegressor(n_estimators=95, max_features='auto',\n max_depth=18, min_samples_split=2, min_samples_leaf=1, bootstrap=True,\n random_state=42)\nrf_cv_scores = cross_val_score(rf_mdl, X_train, Y_train, cv=5, scoring='r2')\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(rf_cv_scores.mean(), \n rf_cv_scores.std() * 2))\n\n\nclass Ensemble(object):\n \"\"\"Ensemble base_models on train data than fit/predict\n\n The object input is composed of 'n_splits', 'stacker' and list of\n 'base_models'.\n\n The __init__ method self-assign the inputs.\n\n The fit_predict method divides the dataset in 'n_splits' then it loops\n trough ammount of 'base_models' fitting all splits and then averaging it on\n a new column in the end. 
In the end, predictions are made with these new\n columns.\n\n If sought the use of voting ensemble, the ammount of models passed on\n base_models can be repeated.\n \"\"\"\n\n def __init__(self, n_splits, stacker, base_models):\n self.n_splits = n_splits\n self.stacker = stacker\n self.base_models = base_models\n\n def fit_predict(self, X, Y, T):\n X = np.array(X)\n Y = np.array(Y)\n T = np.array(T)\n folds = list(KFold(n_splits=self.n_splits, shuffle=True,\n random_state=42).split(X, Y))\n S_train = np.zeros((X.shape[0], len(self.base_models)))\n S_test = np.zeros((T.shape[0], len(self.base_models)))\n print('------- FITTING Stacker - 2nd level -------')\n for i, clf in enumerate(self.base_models):\n S_test_i = np.zeros((T.shape[0], self.n_splits))\n for j, (train_idx, test_idx) in enumerate(folds):\n X_train = X[train_idx]\n Y_train = Y[train_idx]\n X_holdout = X[test_idx]\n Y_holdout = Y[test_idx]\n clf.fit(X_train, Y_train)\n Y_pred = clf.predict(X_holdout)[:]\n print(' Model {}, fold {}. R^2 score: {:.4f}'.format(i, j,\n r2_score(Y_holdout, Y_pred)))\n S_train[test_idx, i] = Y_pred\n S_test_i[:, j] = clf.predict(T)[:]\n S_test[:, i] = S_test_i.mean(axis=1)\n results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')\n print(\n '\\x1b[1;92mDONE! 
\\x1b[0;0m\\x1b[1;37mCV scores: {:.4f} (+/- {:.4f})'\n .format(results.mean(), results.std() * 2))\n self.stacker.fit(S_train, Y)\n final_prediction = self.stacker.predict(S_test)[:]\n return final_prediction\n\n\nstack = Ensemble(n_splits=5, stacker=svr_mdl, base_models=(xgb_mdl, rf_mdl,\n xgb_mdl, svr_mdl, xgb_mdl))\nstack_pred = stack.fit_predict(X_train, Y_train, X_test)\ncustom_style = {'axes.labelcolor': 'white', 'xtick.color': 'white',\n 'ytick.color': 'white'}\ndata = pd.DataFrame(data={'stack_pred': stack_pred, 'Y_test': Y_test})\nax = sns.lmplot(x='Y_test', y='stack_pred', data=data, truncate=True, size=5)\nax.set_axis_labels('Real prices', 'Predicted prices')\nplt.tick_params(axis='both', colors='gray')\nplt.title('Real vs Predicted prices on Boston Housing', fontweight='bold')\nplt.tight_layout()\nplt.show()\n",
"<import token>\nsns.set()\n<assignment token>\nprint('------- FITTING SVR -------')\n<assignment token>\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(svr_cv_scores.mean(),\n svr_cv_scores.std() * 2))\nprint('------- FITTING XGBRegressor -------')\n<assignment token>\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(xgb_cv_scores.mean(),\n xgb_cv_scores.std() * 2))\nprint('------- FITTING RandomForestRegressor -------')\n<assignment token>\nprint(' DONE! CV Scores: {:.4f} (+/- {:.4f})'.format(rf_cv_scores.mean(), \n rf_cv_scores.std() * 2))\n\n\nclass Ensemble(object):\n \"\"\"Ensemble base_models on train data than fit/predict\n\n The object input is composed of 'n_splits', 'stacker' and list of\n 'base_models'.\n\n The __init__ method self-assign the inputs.\n\n The fit_predict method divides the dataset in 'n_splits' then it loops\n trough ammount of 'base_models' fitting all splits and then averaging it on\n a new column in the end. In the end, predictions are made with these new\n columns.\n\n If sought the use of voting ensemble, the ammount of models passed on\n base_models can be repeated.\n \"\"\"\n\n def __init__(self, n_splits, stacker, base_models):\n self.n_splits = n_splits\n self.stacker = stacker\n self.base_models = base_models\n\n def fit_predict(self, X, Y, T):\n X = np.array(X)\n Y = np.array(Y)\n T = np.array(T)\n folds = list(KFold(n_splits=self.n_splits, shuffle=True,\n random_state=42).split(X, Y))\n S_train = np.zeros((X.shape[0], len(self.base_models)))\n S_test = np.zeros((T.shape[0], len(self.base_models)))\n print('------- FITTING Stacker - 2nd level -------')\n for i, clf in enumerate(self.base_models):\n S_test_i = np.zeros((T.shape[0], self.n_splits))\n for j, (train_idx, test_idx) in enumerate(folds):\n X_train = X[train_idx]\n Y_train = Y[train_idx]\n X_holdout = X[test_idx]\n Y_holdout = Y[test_idx]\n clf.fit(X_train, Y_train)\n Y_pred = clf.predict(X_holdout)[:]\n print(' Model {}, fold {}. 
R^2 score: {:.4f}'.format(i, j,\n r2_score(Y_holdout, Y_pred)))\n S_train[test_idx, i] = Y_pred\n S_test_i[:, j] = clf.predict(T)[:]\n S_test[:, i] = S_test_i.mean(axis=1)\n results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')\n print(\n '\\x1b[1;92mDONE! \\x1b[0;0m\\x1b[1;37mCV scores: {:.4f} (+/- {:.4f})'\n .format(results.mean(), results.std() * 2))\n self.stacker.fit(S_train, Y)\n final_prediction = self.stacker.predict(S_test)[:]\n return final_prediction\n\n\n<assignment token>\nax.set_axis_labels('Real prices', 'Predicted prices')\nplt.tick_params(axis='both', colors='gray')\nplt.title('Real vs Predicted prices on Boston Housing', fontweight='bold')\nplt.tight_layout()\nplt.show()\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass Ensemble(object):\n \"\"\"Ensemble base_models on train data than fit/predict\n\n The object input is composed of 'n_splits', 'stacker' and list of\n 'base_models'.\n\n The __init__ method self-assign the inputs.\n\n The fit_predict method divides the dataset in 'n_splits' then it loops\n trough ammount of 'base_models' fitting all splits and then averaging it on\n a new column in the end. In the end, predictions are made with these new\n columns.\n\n If sought the use of voting ensemble, the ammount of models passed on\n base_models can be repeated.\n \"\"\"\n\n def __init__(self, n_splits, stacker, base_models):\n self.n_splits = n_splits\n self.stacker = stacker\n self.base_models = base_models\n\n def fit_predict(self, X, Y, T):\n X = np.array(X)\n Y = np.array(Y)\n T = np.array(T)\n folds = list(KFold(n_splits=self.n_splits, shuffle=True,\n random_state=42).split(X, Y))\n S_train = np.zeros((X.shape[0], len(self.base_models)))\n S_test = np.zeros((T.shape[0], len(self.base_models)))\n print('------- FITTING Stacker - 2nd level -------')\n for i, clf in enumerate(self.base_models):\n S_test_i = np.zeros((T.shape[0], self.n_splits))\n for j, (train_idx, test_idx) in enumerate(folds):\n X_train = X[train_idx]\n Y_train = Y[train_idx]\n X_holdout = X[test_idx]\n Y_holdout = Y[test_idx]\n clf.fit(X_train, Y_train)\n Y_pred = clf.predict(X_holdout)[:]\n print(' Model {}, fold {}. R^2 score: {:.4f}'.format(i, j,\n r2_score(Y_holdout, Y_pred)))\n S_train[test_idx, i] = Y_pred\n S_test_i[:, j] = clf.predict(T)[:]\n S_test[:, i] = S_test_i.mean(axis=1)\n results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')\n print(\n '\\x1b[1;92mDONE! 
\\x1b[0;0m\\x1b[1;37mCV scores: {:.4f} (+/- {:.4f})'\n .format(results.mean(), results.std() * 2))\n self.stacker.fit(S_train, Y)\n final_prediction = self.stacker.predict(S_test)[:]\n return final_prediction\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass Ensemble(object):\n <docstring token>\n\n def __init__(self, n_splits, stacker, base_models):\n self.n_splits = n_splits\n self.stacker = stacker\n self.base_models = base_models\n\n def fit_predict(self, X, Y, T):\n X = np.array(X)\n Y = np.array(Y)\n T = np.array(T)\n folds = list(KFold(n_splits=self.n_splits, shuffle=True,\n random_state=42).split(X, Y))\n S_train = np.zeros((X.shape[0], len(self.base_models)))\n S_test = np.zeros((T.shape[0], len(self.base_models)))\n print('------- FITTING Stacker - 2nd level -------')\n for i, clf in enumerate(self.base_models):\n S_test_i = np.zeros((T.shape[0], self.n_splits))\n for j, (train_idx, test_idx) in enumerate(folds):\n X_train = X[train_idx]\n Y_train = Y[train_idx]\n X_holdout = X[test_idx]\n Y_holdout = Y[test_idx]\n clf.fit(X_train, Y_train)\n Y_pred = clf.predict(X_holdout)[:]\n print(' Model {}, fold {}. R^2 score: {:.4f}'.format(i, j,\n r2_score(Y_holdout, Y_pred)))\n S_train[test_idx, i] = Y_pred\n S_test_i[:, j] = clf.predict(T)[:]\n S_test[:, i] = S_test_i.mean(axis=1)\n results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')\n print(\n '\\x1b[1;92mDONE! \\x1b[0;0m\\x1b[1;37mCV scores: {:.4f} (+/- {:.4f})'\n .format(results.mean(), results.std() * 2))\n self.stacker.fit(S_train, Y)\n final_prediction = self.stacker.predict(S_test)[:]\n return final_prediction\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass Ensemble(object):\n <docstring token>\n <function token>\n\n def fit_predict(self, X, Y, T):\n X = np.array(X)\n Y = np.array(Y)\n T = np.array(T)\n folds = list(KFold(n_splits=self.n_splits, shuffle=True,\n random_state=42).split(X, Y))\n S_train = np.zeros((X.shape[0], len(self.base_models)))\n S_test = np.zeros((T.shape[0], len(self.base_models)))\n print('------- FITTING Stacker - 2nd level -------')\n for i, clf in enumerate(self.base_models):\n S_test_i = np.zeros((T.shape[0], self.n_splits))\n for j, (train_idx, test_idx) in enumerate(folds):\n X_train = X[train_idx]\n Y_train = Y[train_idx]\n X_holdout = X[test_idx]\n Y_holdout = Y[test_idx]\n clf.fit(X_train, Y_train)\n Y_pred = clf.predict(X_holdout)[:]\n print(' Model {}, fold {}. R^2 score: {:.4f}'.format(i, j,\n r2_score(Y_holdout, Y_pred)))\n S_train[test_idx, i] = Y_pred\n S_test_i[:, j] = clf.predict(T)[:]\n S_test[:, i] = S_test_i.mean(axis=1)\n results = cross_val_score(self.stacker, S_train, Y, cv=5, scoring='r2')\n print(\n '\\x1b[1;92mDONE! \\x1b[0;0m\\x1b[1;37mCV scores: {:.4f} (+/- {:.4f})'\n .format(results.mean(), results.std() * 2))\n self.stacker.fit(S_train, Y)\n final_prediction = self.stacker.predict(S_test)[:]\n return final_prediction\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n\n\nclass Ensemble(object):\n <docstring token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<class token>\n<assignment token>\n<code token>\n"
] | false |
9,784 |
41ca762fe6865613ae4ef2f657f86b516353676f
|
from django.contrib.auth import authenticate, login, logout
from django.template import loader
from django.http import (HttpResponse, JsonResponse,
HttpResponseForbidden, HttpResponseBadRequest)
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
import json
from aimodel.AnalyticSession import AnalyticSession
from data.DatasetConfigManager import DatasetConfigManager
def index(request, err_msg=None):
    """
    Render the index (login) page.

    When ``err_msg`` is given it is passed to the template so the page can
    display a login error.
    """
    context = {"err_msg": err_msg}
    template = loader.get_template("aimodel/index.html")
    return HttpResponse(template.render(context, request))
@require_POST
def log_in(request):
    """
    Handle a login form submission.

    On success, redirect to the main page; otherwise re-render the index
    page with an error message.
    """
    # Extract submitted credentials.
    username = request.POST.get("username")
    password = request.POST.get("password")

    if not (username and password):
        return index(request, "Invalid credentials!")

    # Authenticate; fall back to the index page with an error on failure.
    user = authenticate(username=username, password=password)
    if user is None:
        return index(request, "Invalid credentials!")

    login(request, user)
    return redirect("/main")
def main(request):
    """
    Render the main page (dataset selection) for logged-in users.

    Anonymous users are redirected to the index page.
    """
    if not request.user.is_authenticated:
        return redirect("/")

    context = {"datasets": DatasetConfigManager.loaded_datasets_list()}
    template = loader.get_template("aimodel/main.html")
    return HttpResponse(template.render(context, request))
@require_POST
def analytics_session(request):
    """
    Start a new analytic session for the dataset named in the POST data.

    Any previously stored session is discarded first. Renders the analytics
    UI seeded with the initial bucket state.
    """
    if not request.user.is_authenticated:
        return redirect("/")

    try:
        dataset = request.POST["dataset"]
    except KeyError:
        return HttpResponseBadRequest(reason="Invalid request params!")

    # Replace any existing analytic session with a fresh one.
    if "analytics" in request.session:
        del request.session["analytics"]
    request.session["analytics"] = AnalyticSession(dataset)

    # Local name chosen to avoid shadowing the module-level bucket_info view.
    info = request.session["analytics"].bucket_info()

    context = {
        "init_buckets": json.dumps(info["buckets"]),
        "init_bucket_ordering": json.dumps(info["bucket_ordering"]),
    }
    template = loader.get_template("ui/analytics.html")
    return HttpResponse(template.render(context, request))
def log_out(request):
    """
    Log the current user out (if logged in) and return to the index page.
    """
    if request.user.is_authenticated:
        logout(request)
    return redirect("/")
def _check_session_valid(request):
    """
    Validate that the user is authenticated and analytic session data exists.

    Returns an error response (403 or 400) describing the problem, or None
    when the session is usable.
    """
    if not request.user.is_authenticated:
        return HttpResponseForbidden(reason="Access denied!")
    if "analytics" not in request.session:
        return HttpResponseBadRequest(
            reason="Could not fetch analytic session data.")
    return None
def bucket_info(request):
    """
    Return JSON describing the current buckets.
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    analytics = request.session["analytics"]
    return JsonResponse(analytics.bucket_info())
def create_bucket(request):
    """
    Create a new bucket in the analytic session.

    Returns an empty JSON object on success, or a 400 response when the
    session rejects the operation.
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    try:
        request.session["analytics"].create_bucket()
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse({})
@require_POST
def delete_bucket(request):
    """
    Delete the bucket identified by "bucket_id" in the JSON request body.
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    if "bucket_id" not in payload:
        return HttpResponseBadRequest(reason="Invalid request params!")

    try:
        request.session["analytics"].delete_bucket(payload["bucket_id"])
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse({})
@require_POST
def rename_bucket(request):
    """
    Rename a bucket.

    Expects a JSON body with "bucket_id" and "new_bucket_name".
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    try:
        bucket_id = payload["bucket_id"]
        new_name = payload["new_bucket_name"]
    except KeyError:
        return HttpResponseBadRequest(reason="Invalid request params!")

    try:
        request.session["analytics"].rename_bucket(bucket_id, new_name)
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse({})
@require_POST
def swap_buckets(request):
    """
    Swap the display positions of two buckets.

    Expects a JSON body with "bucket1_id" and "bucket2_id".
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    try:
        first = payload["bucket1_id"]
        second = payload["bucket2_id"]
    except KeyError:
        return HttpResponseBadRequest(reason="Invalid request params!")

    try:
        request.session["analytics"].swap_buckets(first, second)
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse({})
@require_POST
def toggle_bucket(request):
    """
    Activate/deactivate the bucket named by "bucket_id" in the JSON body.
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    if "bucket_id" not in payload:
        return HttpResponseBadRequest(reason="Invalid request params!")

    try:
        request.session["analytics"].toggle_bucket(payload["bucket_id"])
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse({})
@require_POST
def interaction_round(request):
    """
    Run one interaction round and return new image suggestions as JSON.

    The JSON request body carries the user's feedback from the previous
    round.
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    feedback = json.loads(request.body)
    try:
        suggestions = request.session["analytics"].interaction_round(feedback)
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse(suggestions, safe=False)
@require_POST
def bucket_view_data(request):
    """
    Return the bucket view data: the images in a bucket with their bucket
    confidences.

    Expects a JSON body with "bucket_id" and "sort_by".
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    try:
        bucket_id = payload["bucket_id"]
        sort_by = payload["sort_by"]
    except KeyError:
        return HttpResponseBadRequest(reason="Invalid request params!")

    analytics = request.session["analytics"]
    try:
        view_data = analytics.bucket_view_data(bucket_id, sort_by)
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse(view_data, safe=False)
def toggle_mode(request):
    """
    Switch the UI between Tetris and grid mode.
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    request.session["analytics"].toggle_mode()
    return JsonResponse({})
@require_POST
def grid_set_size(request):
    """
    Resize the grid and return the resulting grid data as JSON.

    Expects a JSON body with "dim" (which dimension to resize) and
    "new_size".
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    try:
        dim = payload["dim"]
        new_size = payload["new_size"]
    except KeyError:
        return HttpResponseBadRequest(reason="Invalid request params!")

    try:
        grid_data = request.session["analytics"].grid_set_size(dim, new_size)
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse(grid_data, safe=False)
@require_POST
def transfer_images(request):
    """
    Move or copy images between buckets, then return the refreshed view of
    the source bucket.

    Expects a JSON body with "images", "bucket_src", "bucket_dst", "mode"
    and "sort_by".
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    try:
        images = payload["images"]
        bucket_src = payload["bucket_src"]
        bucket_dst = payload["bucket_dst"]
        mode = payload["mode"]
        sort_by = payload["sort_by"]
    except KeyError:
        return HttpResponseBadRequest(reason="Invalid request params!")

    analytics = request.session["analytics"]
    try:
        analytics.transfer_images(images, bucket_src, bucket_dst, mode)
        # Re-fetch the source bucket so the client can redraw it.
        view_data = analytics.bucket_view_data(bucket_src, sort_by)
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse(view_data, safe=False)
@require_POST
def fast_forward(request):
    """
    Fast-forward a bucket.

    Expects a JSON body with "bucket" and "n_ff" (number of images to
    fast-forward).
    """
    error = _check_session_valid(request)
    if error is not None:
        return error

    payload = json.loads(request.body)
    try:
        bucket = payload["bucket"]
        n_ff = payload["n_ff"]
    except KeyError:
        return HttpResponseBadRequest(reason="Invalid request params!")

    try:
        request.session["analytics"].fast_forward(bucket, n_ff)
    except ValueError as exc:
        return HttpResponseBadRequest(reason=str(exc))
    return JsonResponse({})
@require_POST
def ff_commit(request):
    """
    Commits a fast-forward.

    Expects a JSON body with a "bucket" key; returns an empty JSON object
    on success, or a 400 response when params are missing or the session
    rejects the commit.
    """
    session_check = _check_session_valid(request)
    if session_check:
        return session_check

    request_data = json.loads(request.body)
    # NOTE: removed a leftover debug print(request_data) that leaked request
    # payloads into the server log.

    try:
        bucket = request_data["bucket"]
    except KeyError:
        err = "Invalid request params!"
        return HttpResponseBadRequest(reason=err)

    try:
        request.session["analytics"].ff_commit(bucket)
    except ValueError as e:
        return HttpResponseBadRequest(reason=str(e))

    return JsonResponse({})
def end_session(request):
    """
    Ends an analytic session.

    Drops the session's analytics state and tells the client where to
    redirect next.
    """
    invalid_session = _check_session_valid(request)
    if invalid_session:
        return invalid_session

    del request.session["analytics"]

    return JsonResponse({"redirect_url": "/main"})
|
[
"from django.contrib.auth import authenticate, login, logout\nfrom django.template import loader\nfrom django.http import (HttpResponse, JsonResponse,\n HttpResponseForbidden, HttpResponseBadRequest)\nfrom django.shortcuts import redirect\nfrom django.views.decorators.http import require_POST\n\nimport json\n\nfrom aimodel.AnalyticSession import AnalyticSession\nfrom data.DatasetConfigManager import DatasetConfigManager\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template(\"aimodel/index.html\")\n context = {}\n\n context[\"err_msg\"] = err_msg\n\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n\n # Get the username and password\n username = request.POST.get(\"username\")\n password = request.POST.get(\"password\")\n\n if not username or not password:\n return index(request, \"Invalid credentials!\")\n\n # Authenticate and log in\n user = authenticate(username=username, password=password)\n\n if user:\n login(request, user)\n return redirect(\"/main\")\n else:\n return index(request, \"Invalid credentials!\")\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n\n if not request.user.is_authenticated:\n return redirect(\"/\")\n\n template = loader.get_template(\"aimodel/main.html\")\n context = dict()\n context[\"datasets\"] = DatasetConfigManager.loaded_datasets_list()\n\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n\n if not request.user.is_authenticated:\n return redirect(\"/\")\n\n try:\n dataset = request.POST[\"dataset\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n if \"analytics\" in request.session:\n del request.session[\"analytics\"]\n\n request.session[\"analytics\"] = AnalyticSession(dataset)\n\n bucket_info = 
request.session[\"analytics\"].bucket_info()\n\n template = loader.get_template(\"ui/analytics.html\")\n\n context = dict()\n context[\"init_buckets\"] = json.dumps(bucket_info[\"buckets\"])\n context[\"init_bucket_ordering\"] =\\\n json.dumps(bucket_info[\"bucket_ordering\"])\n\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n\n if request.user.is_authenticated:\n logout(request)\n\n return redirect(\"/\")\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason=\"Access denied!\")\n\n if \"analytics\" not in request.session:\n err = \"Could not fetch analytic session data.\"\n return HttpResponseBadRequest(reason=err)\n\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n return JsonResponse(request.session[\"analytics\"].bucket_info())\n\n\ndef create_bucket(request):\n \"\"\"\n Creates a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n try:\n request.session[\"analytics\"].create_bucket()\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef delete_bucket(request):\n \"\"\"\n Deletes a bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].delete_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return 
JsonResponse({})\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n new_bucket_name = request_data[\"new_bucket_name\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket1_id = request_data[\"bucket1_id\"]\n bucket2_id = request_data[\"bucket2_id\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n\n session_check = 
_check_session_valid(request)\n\n if session_check:\n return session_check\n\n user_feedback = json.loads(request.body)\n\n try:\n suggs = request.session[\"analytics\"].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n sort_by = request_data[\"sort_by\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n bucket_view_data =\\\n request.session[\"analytics\"].bucket_view_data(bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request.session[\"analytics\"].toggle_mode()\n\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n dim = request_data[\"dim\"]\n new_size = request_data[\"new_size\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n new_grid_data = request.session[\"analytics\"].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between 
buckets.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n images = request_data[\"images\"]\n bucket_src = request_data[\"bucket_src\"]\n bucket_dst = request_data[\"bucket_dst\"]\n mode = request_data[\"mode\"]\n sort_by = request_data[\"sort_by\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].transfer_images(images,\n bucket_src, bucket_dst,\n mode)\n bucket_view_data =\\\n request.session[\"analytics\"].bucket_view_data(bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(bucket_view_data, safe=False)\n\n\n@require_POST\ndef fast_forward(request):\n \"\"\"\n Fast-forwards a bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket = request_data[\"bucket\"]\n n_ff = request_data[\"n_ff\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].fast_forward(bucket, n_ff)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n print(request_data)\n\n try:\n bucket = request_data[\"bucket\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n\n 
session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n del request.session[\"analytics\"]\n\n response = {\n \"redirect_url\": \"/main\"\n }\n\n return JsonResponse(response)\n",
"from django.contrib.auth import authenticate, login, logout\nfrom django.template import loader\nfrom django.http import HttpResponse, JsonResponse, HttpResponseForbidden, HttpResponseBadRequest\nfrom django.shortcuts import redirect\nfrom django.views.decorators.http import require_POST\nimport json\nfrom aimodel.AnalyticSession import AnalyticSession\nfrom data.DatasetConfigManager import DatasetConfigManager\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n username = request.POST.get('username')\n password = request.POST.get('password')\n if not username or not password:\n return index(request, 'Invalid credentials!')\n user = authenticate(username=username, password=password)\n if user:\n login(request, user)\n return redirect('/main')\n else:\n return index(request, 'Invalid credentials!')\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n 
context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\ndef create_bucket(request):\n \"\"\"\n Creates a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n try:\n request.session['analytics'].create_bucket()\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef delete_bucket(request):\n \"\"\"\n Deletes a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].delete_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return 
session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return 
HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = 
request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n@require_POST\ndef fast_forward(request):\n \"\"\"\n Fast-forwards a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket = request_data['bucket']\n n_ff = request_data['n_ff']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].fast_forward(bucket, n_ff)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n username = request.POST.get('username')\n password = request.POST.get('password')\n if not username or not password:\n return index(request, 'Invalid credentials!')\n user = authenticate(username=username, password=password)\n if user:\n login(request, user)\n return redirect('/main')\n else:\n return index(request, 'Invalid credentials!')\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function 
checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\ndef create_bucket(request):\n \"\"\"\n Creates a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n try:\n request.session['analytics'].create_bucket()\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef delete_bucket(request):\n \"\"\"\n Deletes a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].delete_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return 
JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n 
except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, 
safe=False)\n\n\n@require_POST\ndef fast_forward(request):\n \"\"\"\n Fast-forwards a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket = request_data['bucket']\n n_ff = request_data['n_ff']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].fast_forward(bucket, n_ff)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n username = request.POST.get('username')\n password = request.POST.get('password')\n if not username or not password:\n return index(request, 'Invalid credentials!')\n user = authenticate(username=username, password=password)\n if user:\n login(request, user)\n return redirect('/main')\n else:\n return index(request, 'Invalid credentials!')\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function 
checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\ndef create_bucket(request):\n \"\"\"\n Creates a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n try:\n request.session['analytics'].create_bucket()\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, 
bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n 
request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n@require_POST\ndef fast_forward(request):\n \"\"\"\n Fast-forwards a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket = request_data['bucket']\n n_ff = request_data['n_ff']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].fast_forward(bucket, n_ff)\n except ValueError as e:\n 
return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n username = request.POST.get('username')\n password = request.POST.get('password')\n if not username or not password:\n return index(request, 'Invalid credentials!')\n user = authenticate(username=username, password=password)\n if user:\n login(request, user)\n return redirect('/main')\n else:\n return index(request, 'Invalid credentials!')\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function 
checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return 
session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = 
request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n@require_POST\ndef fast_forward(request):\n \"\"\"\n Fast-forwards a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket = request_data['bucket']\n n_ff = request_data['n_ff']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].fast_forward(bucket, n_ff)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n 
print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n username = request.POST.get('username')\n password = request.POST.get('password')\n if not username or not password:\n return index(request, 'Invalid credentials!')\n user = authenticate(username=username, password=password)\n if user:\n login(request, user)\n return redirect('/main')\n else:\n return index(request, 'Invalid credentials!')\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function 
checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return 
session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = 
request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = 
{'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about 
current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef 
interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef 
transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about 
current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function 
token>\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n 
err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check 
= _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n 
sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return 
JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check 
= _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n 
\"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n 
\"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check 
= _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except 
KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check 
= _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = 
request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except 
KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return 
HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except 
KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = 
json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n 
request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 
'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n 
request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 
'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n 
request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n 
request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = 
request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n 
request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
9,785 |
518dcdca8f5e6b42624083e4327143dfba59b2ba
|
def emphasize(sentence):
    """Title-case every space-separated word in *sentence*.

    Each word's first character is upper-cased and the rest lower-cased.
    Words with no leading letter (e.g. "99") pass through unchanged
    except for the lower-casing of any later letters.
    """
    # str.capitalize() is equivalent to word[0].upper() + word[1:].lower()
    # but does not raise IndexError on empty words, so inputs like ""
    # or "a  b" (consecutive spaces) are handled instead of crashing.
    return " ".join(word.capitalize() for word in sentence.split(" "))


exp1 = "Hello World"
ans1 = emphasize("hello world")
assert ans1 == exp1, f"expected {exp1}, got {ans1}"
exp2 = "Good Morning"
ans2 = emphasize("GOOD MORNING")
assert ans2 == exp2, f"expected {exp2}, got {ans2}"
exp3 = "99 Red Balloons!"
ans3 = emphasize("99 red balloons!")
assert ans3 == exp3, f"expected {exp3}, got {ans3}"
print("everything okay")
|
[
"def emphasize(sentence):\n words = sentence.split(\" \")\n for i, word in enumerate(words):\n words[i] = word[0].upper() + word[1:].lower()\n return \" \".join(words)\n\n\nexp1 = \"Hello World\"\nans1 = emphasize(\"hello world\")\nassert ans1 == exp1, f\"expected {exp1}, got {ans1}\"\n\nexp2 = \"Good Morning\"\nans2 = emphasize(\"GOOD MORNING\")\nassert ans2 == exp2, f\"expected {exp2}, got {ans2}\"\n\nexp3 = \"99 Red Balloons!\"\nans3 = emphasize(\"99 red balloons!\")\nassert ans3 == exp3, f\"expected {exp3}, got {ans3}\"\n\nprint(\"everything okay\")\n",
"def emphasize(sentence):\n words = sentence.split(' ')\n for i, word in enumerate(words):\n words[i] = word[0].upper() + word[1:].lower()\n return ' '.join(words)\n\n\nexp1 = 'Hello World'\nans1 = emphasize('hello world')\nassert ans1 == exp1, f'expected {exp1}, got {ans1}'\nexp2 = 'Good Morning'\nans2 = emphasize('GOOD MORNING')\nassert ans2 == exp2, f'expected {exp2}, got {ans2}'\nexp3 = '99 Red Balloons!'\nans3 = emphasize('99 red balloons!')\nassert ans3 == exp3, f'expected {exp3}, got {ans3}'\nprint('everything okay')\n",
"def emphasize(sentence):\n words = sentence.split(' ')\n for i, word in enumerate(words):\n words[i] = word[0].upper() + word[1:].lower()\n return ' '.join(words)\n\n\n<assignment token>\nassert ans1 == exp1, f'expected {exp1}, got {ans1}'\n<assignment token>\nassert ans2 == exp2, f'expected {exp2}, got {ans2}'\n<assignment token>\nassert ans3 == exp3, f'expected {exp3}, got {ans3}'\nprint('everything okay')\n",
"def emphasize(sentence):\n words = sentence.split(' ')\n for i, word in enumerate(words):\n words[i] = word[0].upper() + word[1:].lower()\n return ' '.join(words)\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
9,786 |
1c55cfa03cd9210b7cf9e728732afe19930e9a41
|
# NOTE: Python 2 script (bare `print` statements); it will not parse under
# Python 3.  Exploratory driver for the project-local `yet` Solidity-AST
# helper module.
import yet
import pickle
# Address -> verified contract source mapping gathered by an earlier step.
# NOTE(review): pickle.load executes arbitrary code when the file is
# untrusted -- assumed here to be locally generated.
sources = pickle.load(open("./db/source_list"))
addr_list = sources.keys()
# Dead code kept for reference (string literal, never executed): a sweep
# that built a yet.tree per contract and printed its owner functions.
'''
for i in range(len(addr_list)):
    print addr_list[i],
    try:
        a = yet.tree(None, sources[addr_list[i]])
        print ' Owner :',
        for i in a.owner.keys():
            print i+ '() ' + a.owner[i][1]['name'] + ',',
    except Exception as e:
        pass
        #print 'error!'
    print ''
'''
# Compile a single local test contract (source capped at 100 kB by
# read(100000)) and wrap the first compilation unit's AST in a yet.tree.
compiled = yet.solc.compile_source(open("./test.sol").read(100000))
ast = compiled[compiled.keys()[0]]['ast']
b = yet.tree(ast)
# Dump modifier names, all function names, public function names, and the
# detected owner structure extracted from the AST.
print 'modifier list'
for i in b.modifier_list:
    print i['attributes']['name']
print 'function list'
for i in b.function_list:
    print i['attributes']['name']
print ''
for i in b.public_function_list:
    print i['attributes']['name']
print b.owner
# More dead code kept for reference (string literal, never executed): a
# batch job that stripped comments/pragmas from scraped sources and wrote
# the cleaned mapping back to disk.
'''
import pickle
import solc
import re
import utils.getsource as gs
import utils.verified_parse as vp
sources = pickle.load(open('./db/real_source_list', 'r'))
addr_list = sources.keys()
new_sources = {}
compiled_list = []
err_count = 0
for i in range(len(addr_list)):
    print str(i)
    #print gs.comment_remover(sources[addr_list[i]])
    #print gs.clear(sources[addr_list[i]])
    try:
        new_sources[addr_list[i]] = re.sub('pragma.+[\n]', '', gs.clear(sources[addr_list[i]]))
    except:
        print 'fuck!!'
        err_count += 1
    #compiled_list.append(solc.compile_source(tmp))
pickle.dump(new_sources, open("./db/real_source_list.tmp", "wb"))
print 'total error count : ' + str(err_count)
for i in addr_list:
    tmp_source = gs.comment_remover(sources[i])
    print gs.getcontractname(tmp_source)
'''
|
[
"import yet\nimport pickle\n\nsources = pickle.load(open(\"./db/source_list\"))\naddr_list = sources.keys()\n\n'''\nfor i in range(len(addr_list)):\n print addr_list[i], \n try:\n a = yet.tree(None, sources[addr_list[i]])\n\n print ' Owner :',\n\n for i in a.owner.keys():\n print i+ '() ' + a.owner[i][1]['name'] + ',',\n except Exception as e:\n pass\n #print 'error!'\n print ''\n'''\n\ncompiled = yet.solc.compile_source(open(\"./test.sol\").read(100000))\nast = compiled[compiled.keys()[0]]['ast']\n\nb = yet.tree(ast)\n\nprint 'modifier list'\nfor i in b.modifier_list:\n print i['attributes']['name']\n\nprint 'function list'\nfor i in b.function_list:\n print i['attributes']['name']\nprint ''\n\nfor i in b.public_function_list:\n print i['attributes']['name']\n\nprint b.owner\n\n\n'''\nimport pickle\nimport solc\nimport re\n\nimport utils.getsource as gs\nimport utils.verified_parse as vp\n\nsources = pickle.load(open('./db/real_source_list', 'r'))\naddr_list = sources.keys()\n\nnew_sources = {}\n\ncompiled_list = []\nerr_count = 0\nfor i in range(len(addr_list)):\n print str(i)\n\n #print gs.comment_remover(sources[addr_list[i]])\n #print gs.clear(sources[addr_list[i]])\n\n try:\n new_sources[addr_list[i]] = re.sub('pragma.+[\\n]', '', gs.clear(sources[addr_list[i]]))\n except:\n print 'fuck!!'\n err_count += 1\n\n #compiled_list.append(solc.compile_source(tmp))\n\npickle.dump(new_sources, open(\"./db/real_source_list.tmp\", \"wb\"))\n\nprint 'total error count : ' + str(err_count)\nfor i in addr_list:\n tmp_source = gs.comment_remover(sources[i])\n\n print gs.getcontractname(tmp_source)\n'''\n"
] | true |
9,787 |
a78bbb85f4912e5f7ea23f689de65cb16a38d814
|
import asyncio
from . import edit_or_reply, udy
plugin_category = "utils"
@udy.cod_cmd(
pattern="as$",
command=("as", plugin_category),
info={
"header": "salam.",
"usage": "{tr}as",
},
)
async def _(event):
"animation command"
event = await edit_or_reply(event, "as")
await event.edit("yuuhuuuu")
await asyncio.sleep(2)
await event.edit("Assalamualaikum wr. wb.")
@udy.cod_cmd(
pattern="ws$",
command=("ws", plugin_category),
info={
"header": "answer the salam.",
"usage": "{tr}ws",
},
)
async def _(event):
"animation command"
event = await edit_or_reply(event, "ws")
await event.edit("huuyyyy")
await asyncio.sleep(2)
await event.edit("Waalaikum salam wr. wb.")
|
[
"import asyncio\n\nfrom . import edit_or_reply, udy\n\nplugin_category = \"utils\"\n\n\[email protected]_cmd(\n pattern=\"as$\",\n command=(\"as\", plugin_category),\n info={\n \"header\": \"salam.\",\n \"usage\": \"{tr}as\",\n },\n)\nasync def _(event):\n \"animation command\"\n event = await edit_or_reply(event, \"as\")\n await event.edit(\"yuuhuuuu\")\n await asyncio.sleep(2)\n await event.edit(\"Assalamualaikum wr. wb.\")\n\n\[email protected]_cmd(\n pattern=\"ws$\",\n command=(\"ws\", plugin_category),\n info={\n \"header\": \"answer the salam.\",\n \"usage\": \"{tr}ws\",\n },\n)\nasync def _(event):\n \"animation command\"\n event = await edit_or_reply(event, \"ws\")\n await event.edit(\"huuyyyy\")\n await asyncio.sleep(2)\n await event.edit(\"Waalaikum salam wr. wb.\")\n",
"import asyncio\nfrom . import edit_or_reply, udy\nplugin_category = 'utils'\n\n\[email protected]_cmd(pattern='as$', command=('as', plugin_category), info={'header':\n 'salam.', 'usage': '{tr}as'})\nasync def _(event):\n \"\"\"animation command\"\"\"\n event = await edit_or_reply(event, 'as')\n await event.edit('yuuhuuuu')\n await asyncio.sleep(2)\n await event.edit('Assalamualaikum wr. wb.')\n\n\[email protected]_cmd(pattern='ws$', command=('ws', plugin_category), info={'header':\n 'answer the salam.', 'usage': '{tr}ws'})\nasync def _(event):\n \"\"\"animation command\"\"\"\n event = await edit_or_reply(event, 'ws')\n await event.edit('huuyyyy')\n await asyncio.sleep(2)\n await event.edit('Waalaikum salam wr. wb.')\n",
"<import token>\nplugin_category = 'utils'\n\n\[email protected]_cmd(pattern='as$', command=('as', plugin_category), info={'header':\n 'salam.', 'usage': '{tr}as'})\nasync def _(event):\n \"\"\"animation command\"\"\"\n event = await edit_or_reply(event, 'as')\n await event.edit('yuuhuuuu')\n await asyncio.sleep(2)\n await event.edit('Assalamualaikum wr. wb.')\n\n\[email protected]_cmd(pattern='ws$', command=('ws', plugin_category), info={'header':\n 'answer the salam.', 'usage': '{tr}ws'})\nasync def _(event):\n \"\"\"animation command\"\"\"\n event = await edit_or_reply(event, 'ws')\n await event.edit('huuyyyy')\n await asyncio.sleep(2)\n await event.edit('Waalaikum salam wr. wb.')\n",
"<import token>\n<assignment token>\n\n\[email protected]_cmd(pattern='as$', command=('as', plugin_category), info={'header':\n 'salam.', 'usage': '{tr}as'})\nasync def _(event):\n \"\"\"animation command\"\"\"\n event = await edit_or_reply(event, 'as')\n await event.edit('yuuhuuuu')\n await asyncio.sleep(2)\n await event.edit('Assalamualaikum wr. wb.')\n\n\[email protected]_cmd(pattern='ws$', command=('ws', plugin_category), info={'header':\n 'answer the salam.', 'usage': '{tr}ws'})\nasync def _(event):\n \"\"\"animation command\"\"\"\n event = await edit_or_reply(event, 'ws')\n await event.edit('huuyyyy')\n await asyncio.sleep(2)\n await event.edit('Waalaikum salam wr. wb.')\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
9,788 |
fd059ae6e5eb3f7dc18dff6f9ed206002cea5fb2
|
import os
print(os.name)
#print(os.environ)
print(os.environ.get('PATH'))
print(os.path.abspath('.'))
os.path.join(os.path.abspath('.'),'testdir')
os.mkdir(os.path.abspath('.'))
|
[
"import os\nprint(os.name)\n#print(os.environ)\nprint(os.environ.get('PATH'))\nprint(os.path.abspath('.'))\nos.path.join(os.path.abspath('.'),'testdir')\nos.mkdir(os.path.abspath('.'))",
"import os\nprint(os.name)\nprint(os.environ.get('PATH'))\nprint(os.path.abspath('.'))\nos.path.join(os.path.abspath('.'), 'testdir')\nos.mkdir(os.path.abspath('.'))\n",
"<import token>\nprint(os.name)\nprint(os.environ.get('PATH'))\nprint(os.path.abspath('.'))\nos.path.join(os.path.abspath('.'), 'testdir')\nos.mkdir(os.path.abspath('.'))\n",
"<import token>\n<code token>\n"
] | false |
9,789 |
44e4151279884ce7c5d5a9e5c82916ce2d3ccbc2
|
import random
from datetime import timedelta
from typing import Union, Type, Tuple, List, Dict
from django import http
from django.test import TestCase, Client
from django.utils import timezone
from exam_web import errors
from exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, \
UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus
class ApiClient(Client):
path: str
def __init__(self, path: str, student: Student = None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.student = student
self.path = path
self.headers = {'content_type': 'application/json'}
if student:
self.cookies['student'] = student.id
def path_params(self, **params):
return ApiClient(self.path.format(**params), self.student)
def get(self, **kwargs):
return super().get(self.path, data=kwargs, **self.headers)
def post(self, **json):
return super().post(self.path, data=json, **self.headers)
def __call__(self, **kwargs):
raise AttributeError('Use `get` or `post` methods instead')
class ApiTestCase(TestCase):
group: AcademyGroup
student: Student
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.group = AcademyGroup.objects.create(name='test_group')
cls.student = Student.objects.create(name='test user', group=cls.group)
@classmethod
def tearDownClass(cls):
cls.student.delete()
cls.group.delete()
super().tearDownClass()
def setup_exam_objects(self):
self.session = ExamSession.objects.create(
start_time=timezone.now(), duration=timedelta(minutes=40))
self.student_session = UserSession.objects.create(
student=self.student, exam_session=self.session)
self.questions = [
Question.objects.create(
stage=Stage.first, type=QuestionType.single, max_score=1,
text='test single question', options=['a', 'b', 'c']
),
Question.objects.create(
stage=Stage.first, type=QuestionType.multi, max_score=1,
text='test multi question', options=['a', 'b', 'c']
),
Question.objects.create(
stage=Stage.second, type=QuestionType.open, max_score=1,
text='test open question', options=None,
),
]
self.tickets = [
ExamTicket.objects.create(
student=self.student, session=self.student_session,
question=question) for question in self.questions
]
self.ticket_map = {x.id: x for x in self.tickets}
def teardown_exam_objects(self):
for ticket in self.tickets:
ticket.delete()
for question in self.questions:
question.delete()
self.student_session.delete()
def assertResponseSuccess(self, response: http.HttpResponse):
content = response.content.decode()
self.assertEqual(response.status_code, 200,
(response.status_code, content))
content = response.json()
self.assertIn('result', content, content)
return content['result']
def assertResponseError(
self, response: http.JsonResponse,
error: Union[errors.APIError, Type[errors.APIError]] = None
) -> Tuple[int, str]:
content = response.json()
self.assertGreaterEqual(response.status_code, 400,
(response.status_code, content))
self.assertIn('error', content, content)
if error is not None:
if isinstance(error, type):
error = error()
self.assertEqual(response.status_code, error.status,
(response.status_code, content))
self.assertEqual(content['error'], error.message,
(response.status_code, content))
return response.status_code, content['error']
class TestAuthorize(ApiTestCase):
authorize: ApiClient
def setUp(self):
super().setUp()
self.authorize = ApiClient('/api/authorize')
def test_authorized(self):
response = self.authorize.post(token=self.student.id)
result = self.assertResponseSuccess(response)
self.assertEqual(response.cookies['student'].value, self.student.id)
self.assertEqual(result['name'], self.student.name)
self.assertEqual(result['group'], self.group.name)
self.assertEqual(result['id'], self.student.id)
def test_authorized_unknown_token(self):
response = self.authorize.post(token=uuid_str())
self.assertResponseError(response, errors.Unauthorized)
def test_authorized_invalid_params(self):
response = self.authorize.post()
self.assertResponseError(response, errors.InvalidParameter('token'))
response = self.authorize.post(token=12345678)
self.assertResponseError(response, errors.InvalidParameter('token'))
response = self.authorize.get()
self.assertEqual(response.status_code, 405)
class TestGetExamSessions(ApiTestCase):
get_exams: ApiClient
session: ExamSession
student_session: UserSession
questions: List[Question]
tickets: List[ExamTicket]
def setUp(self):
super().setUp()
self.get_exams = ApiClient('/api/exams', student=self.student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_get_exams_available(self):
result = self.assertResponseSuccess(self.get_exams.get())
self.assertIsInstance(result, list)
self.assertEqual(len(result), 1)
user_session = result[0]
self.assertEqual(
user_session['started_at'], self.session.start_time.isoformat())
self.assertEqual(user_session['duration'],
self.session.duration.total_seconds() / 60)
self.assertEqual(user_session['checked_in'], False)
self.assertEqual(user_session['finished_at'], None)
self.assertEqual(user_session['status'], ExamStatus.available.value)
self.assertEqual(user_session['score'], None)
def test_get_exams_check_in(self):
self.student_session.started_at = timezone.now()
self.student_session.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['checked_in'], True)
def test_get_exams_submitted(self):
now = timezone.now()
self.student_session.started_at = timezone.now()
self.student_session.finished_at = now
self.student_session.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['finished_at'], now.isoformat())
self.assertEqual(user_session['status'], ExamStatus.submitted)
self.assertEqual(user_session['score'], None)
def test_get_exams_non_available(self):
self.session.start_time = timezone.now() + self.session.duration
self.session.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['started_at'],
self.session.start_time.isoformat())
self.assertEqual(user_session['finished_at'], None)
self.assertEqual(user_session['status'], ExamStatus.not_available)
def test_get_exams_unauthorized(self):
self.get_exams.cookies = {}
self.assertResponseError(self.get_exams.get(), errors.Unauthorized)
response = self.get_exams.post()
self.assertEqual(response.status_code, 405)
def test_get_exams_score(self):
for ticket in self.tickets:
ticket.score = 1.0
ticket.save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['score'],
sum(t.score for t in self.tickets))
self.tickets[0].score = None
self.tickets[0].save()
result = self.assertResponseSuccess(self.get_exams.get())
user_session = result[0]
self.assertEqual(user_session['score'], None)
class TestGetExamTickets(ApiTestCase):
get_exams: ApiClient
session: ExamSession
student_session: UserSession
questions: List[Question]
tickets: List[ExamTicket]
ticket_map: Dict[str, ExamTicket]
def setUp(self):
super().setUp()
self.get_exam_questions = \
ApiClient('/api/tickets', student=self.student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_get_exam_questions(self):
self.assertFalse(self.student_session.check_in)
result = self.assertResponseSuccess(
self.get_exam_questions.post(session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.available)
self.assertEqual(result['score'], None)
self.student_session.refresh_from_db()
self.assertTrue(self.student_session.check_in)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
self.assertEqual([x['id'] for x in questions], [
x.id for x in sorted(self.tickets, key=lambda x: x.question.stage)
])
for question in questions:
ticket = self.ticket_map[question['id']]
ticket_question = ticket.question
self.assertEqual(question.pop('id'), ticket.id)
view = ticket_question.as_dict
view.pop('id')
self.assertEqual(question, view)
def test_get_exam_questions_already_checked_in(self):
self.student_session.check_in = True
checkin_date = self.student_session.started_at
result = self.assertResponseSuccess(
self.get_exam_questions.post(session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.available)
self.assertEqual(result['score'], None)
self.student_session.refresh_from_db()
self.assertTrue(self.student_session.check_in)
self.assertEqual(self.student_session.started_at, checkin_date)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
def test_get_exam_questions_not_available(self):
self.session.start_time += self.session.duration
self.session.save()
result = self.assertResponseSuccess(
self.get_exam_questions.post(session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.not_available)
self.assertEqual(result['score'], None)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), 0)
def test_get_exam_questions_submitted(self):
self.student_session.finished_at = timezone.now()
self.student_session.save()
ANSWER = 'answer'
for ticket in self.tickets:
ticket.answer = ANSWER
ticket.save()
result = self.assertResponseSuccess(
self.get_exam_questions.post(session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.submitted)
self.assertEqual(result['score'], None)
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
for question in questions:
ticket = self.ticket_map[question['id']]
ticket.refresh_from_db()
answer = question.pop('answer')
self.assertEqual(answer, ticket.answer)
self.assertEqual(question['score'], None)
def test_get_exam_questions_submitted_and_scored(self):
self.student_session.finished_at = timezone.now()
self.student_session.save()
ANSWER = 'answer'
for ticket in self.tickets:
ticket.answer = ANSWER
ticket.score = 1.0
ticket.save()
result = self.assertResponseSuccess(
self.get_exam_questions.post(session_id=self.student_session.id))
self.assertEqual(result['status'], ExamStatus.submitted)
self.assertEqual(result['score'], sum(t.score for t in self.tickets))
questions = result['questions']
self.assertIsInstance(questions, list)
self.assertEqual(len(questions), len(self.tickets))
for question in questions:
ticket = self.ticket_map[question['id']]
ticket.refresh_from_db()
self.assertEqual(question['score'], ticket.score)
def test_get_exam_questions_invalid_params(self):
self.assertResponseError(self.get_exam_questions.post(),
errors.InvalidParameter('session_id'))
self.assertResponseError(
self.get_exam_questions.post(session_id=uuid_str()),
errors.ExamNotFound)
self.get_exam_questions.cookies = {}
self.assertResponseError(
self.get_exam_questions.post(session_id=self.student_session.id),
errors.Unauthorized)
response = self.get_exam_questions.get()
self.assertEqual(response.status_code, 405)
class TestSubmitExam(ApiTestCase):
def setUp(self):
super().setUp()
self.submit_exam = ApiClient('/api/submit', student=self.student)
self.setup_exam_objects()
def tearDown(self):
self.teardown_exam_objects()
super().tearDown()
def test_submit_exam(self):
answers = {}
ANSWER = 'answer'
for ticket in self.tickets:
if ticket.question.type == QuestionType.single:
answers[ticket.id] = \
random.randint(0, len(ticket.question.options)-1)
elif ticket.question.type == QuestionType.multi:
answers[ticket.id] = random.sample(
list(range(0, len(ticket.question.options))),
k=random.randint(0, len(ticket.question.options))
)
else:
answers[ticket.id] = ANSWER
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers=answers))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
if ticket.question.type == QuestionType.single:
self.assertEqual(
ticket.answer, ticket.question.options[answers[ticket.id]])
elif ticket.question.type == QuestionType.multi:
self.assertEqual(ticket.answer, ';'.join([
ticket.question.options[x]
for x in sorted(answers[ticket.id])
]))
self.assertIsNotNone(ticket.answered_at)
def test_submit_without_any_answer(self):
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers={}))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
self.assertIsNone(ticket.answered_at)
self.assertIsNone(ticket.answer)
def test_submit_partial_answer_errors(self):
ANSWER = 'answer'
answers = {
# неверный порядковый индекс ответа
self.tickets[0].id: len(self.tickets[0].question.options),
# неверный тип ответа
self.tickets[1].id: 0,
# корректный ответ
self.tickets[2].id: ANSWER,
# неверный ид билета
uuid_str(): ANSWER,
# несуществующий тикет
self.tickets[2].id + 1: ANSWER,
}
result = self.assertResponseSuccess(self.submit_exam.post(
session_id=self.student_session.id, answers=answers))
self.assertEqual(result, True)
self.student_session.refresh_from_db()
self.assertEqual(self.student_session.status, ExamStatus.submitted)
for ticket in self.tickets:
ticket.refresh_from_db()
self.assertIsNone(self.tickets[0].answer)
self.assertIsNone(self.tickets[0].answered_at)
self.assertIsNone(self.tickets[1].answer)
self.assertIsNone(self.tickets[1].answered_at)
self.assertEqual(self.tickets[2].answer, ANSWER)
self.assertIsNotNone(self.tickets[2].answered_at)
def test_submit_errors(self):
self.assertResponseError(self.submit_exam.post(),
errors.InvalidParameter('session_id'))
self.assertResponseError(self.submit_exam.post(session_id=123),
errors.InvalidParameter('session_id'))
self.assertResponseError(self.submit_exam.post(session_id=uuid_str()),
errors.InvalidParameter('answers'))
self.assertResponseError(
self.submit_exam.post(session_id=uuid_str(), answers=[]),
errors.InvalidParameter('answers'))
self.assertResponseError(
self.submit_exam.post(session_id=uuid_str(), answers={}),
errors.ExamNotFound)
self.session.start_time += self.session.duration
self.session.save()
self.assertResponseError(self.submit_exam.post(
session_id=self.student_session.id, answers={}),
errors.ExamNotAvailable)
self.student_session.start_time = timezone.now()
self.student_session.save()
self.assertResponseError(self.submit_exam.post(
session_id=self.student_session.id, answers={}),
errors.ExamNotAvailable)
|
[
"import random\nfrom datetime import timedelta\nfrom typing import Union, Type, Tuple, List, Dict\n\nfrom django import http\nfrom django.test import TestCase, Client\nfrom django.utils import timezone\n\nfrom exam_web import errors\nfrom exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, \\\n UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student = None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n\n def __call__(self, **kwargs):\n raise AttributeError('Use `get` or `post` methods instead')\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(\n start_time=timezone.now(), duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(\n student=self.student, exam_session=self.session)\n self.questions = [\n Question.objects.create(\n stage=Stage.first, type=QuestionType.single, max_score=1,\n text='test single question', options=['a', 'b', 'c']\n ),\n Question.objects.create(\n stage=Stage.first, type=QuestionType.multi, max_score=1,\n text='test multi question', options=['a', 'b', 'c']\n 
),\n Question.objects.create(\n stage=Stage.second, type=QuestionType.open, max_score=1,\n text='test open question', options=None,\n ),\n ]\n self.tickets = [\n ExamTicket.objects.create(\n student=self.student, session=self.student_session,\n question=question) for question in self.questions\n ]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200,\n (response.status_code, content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(\n self, response: http.JsonResponse,\n error: Union[errors.APIError, Type[errors.APIError]] = None\n ) -> Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400,\n (response.status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status,\n (response.status_code, content))\n self.assertEqual(content['error'], error.message,\n (response.status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n\n self.assertEqual(response.cookies['student'].value, self.student.id)\n\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = 
self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(\n user_session['started_at'], self.session.start_time.isoformat())\n self.assertEqual(user_session['duration'],\n self.session.duration.total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n 
self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'],\n self.session.start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'],\n sum(t.score for t in self.tickets))\n\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = \\\n ApiClient('/api/tickets', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n 
self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [\n x.id for x in sorted(self.tickets, key=lambda x: x.question.stage)\n ])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n\n result = self.assertResponseSuccess(\n 
self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n\n result = self.assertResponseSuccess(\n self.get_exam_questions.post(session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(\n self.get_exam_questions.post(session_id=uuid_str()),\n errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(\n self.get_exam_questions.post(session_id=self.student_session.id),\n errors.Unauthorized)\n\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def 
tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = \\\n random.randint(0, len(ticket.question.options)-1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(\n list(range(0, len(ticket.question.options))),\n k=random.randint(0, len(ticket.question.options))\n )\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(\n ticket.answer, ticket.question.options[answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([\n ticket.question.options[x]\n for x in sorted(answers[ticket.id])\n ]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {\n # неверный порядковый индекс ответа\n self.tickets[0].id: len(self.tickets[0].question.options),\n # неверный тип ответа\n self.tickets[1].id: 0,\n # корректный ответ\n self.tickets[2].id: ANSWER,\n # неверный ид билета\n uuid_str(): ANSWER,\n # несуществующий тикет\n self.tickets[2].id + 1: 
ANSWER,\n }\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str()),\n errors.InvalidParameter('answers'))\n self.assertResponseError(\n self.submit_exam.post(session_id=uuid_str(), answers=[]),\n errors.InvalidParameter('answers'))\n self.assertResponseError(\n self.submit_exam.post(session_id=uuid_str(), answers={}),\n errors.ExamNotFound)\n\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(\n session_id=self.student_session.id, answers={}),\n errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(\n session_id=self.student_session.id, answers={}),\n errors.ExamNotAvailable)\n",
"import random\nfrom datetime import timedelta\nfrom typing import Union, Type, Tuple, List, Dict\nfrom django import http\nfrom django.test import TestCase, Client\nfrom django.utils import timezone\nfrom exam_web import errors\nfrom exam_web.models import Student, AcademyGroup, uuid_str, ExamSession, UserSession, Question, Stage, QuestionType, ExamTicket, ExamStatus\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n\n def __call__(self, **kwargs):\n raise AttributeError('Use `get` or `post` methods instead')\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n 
objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n 
self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n 
self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n 
self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n 
session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n 
super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n 
self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n\n def __call__(self, **kwargs):\n raise AttributeError('Use `get` or `post` methods instead')\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for 
ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n 
self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n 
self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n 
ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n 
self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, 
len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n 
self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n\n\nclass ApiClient(Client):\n path: str\n\n def __init__(self, path: str, student: Student=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.student = student\n self.path = path\n self.headers = {'content_type': 'application/json'}\n if student:\n self.cookies['student'] = student.id\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n <function token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n 
question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass 
TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n 
self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n 
self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], 
None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = 
self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n 
self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n\n\nclass ApiClient(Client):\n path: str\n <function token>\n\n def path_params(self, **params):\n return ApiClient(self.path.format(**params), self.student)\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n <function token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n 
content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', 
student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), 
errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = 
self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n 
ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, 
ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n 
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n\n\nclass ApiClient(Client):\n path: str\n <function token>\n <function token>\n\n def get(self, **kwargs):\n return super().get(self.path, data=kwargs, **self.headers)\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n <function token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return 
content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n 
self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n 
self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in 
self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), 
errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n\n\nclass ApiClient(Client):\n path: str\n <function token>\n <function token>\n <function token>\n\n def post(self, **json):\n return super().post(self.path, data=json, **self.headers)\n <function token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: 
http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def 
test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n 
for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n 
self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n 
self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n 
self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n 
), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n\n\nclass ApiClient(Client):\n path: str\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) 
->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n 
self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = 
self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n 
self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n 
self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n 
answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n 
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n\n def assertResponseSuccess(self, response: http.HttpResponse):\n content = response.content.decode()\n self.assertEqual(response.status_code, 200, (response.status_code,\n content))\n content = response.json()\n self.assertIn('result', content, content)\n return content['result']\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, 
content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n 
self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], 
sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n 
self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n 
self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in 
sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n 
self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n\n def setup_exam_objects(self):\n self.session = ExamSession.objects.create(start_time=timezone.now(),\n duration=timedelta(minutes=40))\n self.student_session = UserSession.objects.create(student=self.\n student, exam_session=self.session)\n self.questions = [Question.objects.create(stage=Stage.first, type=\n QuestionType.single, max_score=1, text='test single question',\n options=['a', 'b', 'c']), Question.objects.create(stage=Stage.\n first, type=QuestionType.multi, max_score=1, text=\n 'test multi question', options=['a', 'b', 'c']), Question.\n objects.create(stage=Stage.second, type=QuestionType.open,\n max_score=1, text='test open question', options=None)]\n self.tickets = [ExamTicket.objects.create(student=self.student,\n session=self.student_session, question=question) for question in\n self.questions]\n self.ticket_map = {x.id: x for x in self.tickets}\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n <function token>\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], 
error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n 
self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: 
ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += 
self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n 
self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, 
True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n 
self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n <function token>\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n <function token>\n\n def assertResponseError(self, response: http.JsonResponse, error: Union\n [errors.APIError, Type[errors.APIError]]=None) ->Tuple[int, str]:\n content = response.json()\n self.assertGreaterEqual(response.status_code, 400, (response.\n status_code, content))\n self.assertIn('error', content, content)\n if error is not None:\n if isinstance(error, type):\n error = error()\n self.assertEqual(response.status_code, error.status, (response.\n status_code, content))\n self.assertEqual(content['error'], error.message, (response.\n status_code, content))\n return response.status_code, content['error']\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n 
self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def 
test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, 
list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n 
self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n 
answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n 
self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n\n @classmethod\n def tearDownClass(cls):\n cls.student.delete()\n cls.group.delete()\n super().tearDownClass()\n <function token>\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n <function token>\n <function token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n 
self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n 
self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in 
self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), 
errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n\n @classmethod\n def setUpClass(cls):\n super().setUpClass()\n cls.group = AcademyGroup.objects.create(name='test_group')\n cls.student = Student.objects.create(name='test user', group=cls.group)\n <function token>\n <function token>\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n <function token>\n <function token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n 
result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n 
ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], 
ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n 
self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n 
answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n 
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n <function token>\n <function token>\n <function token>\n\n def teardown_exam_objects(self):\n for ticket in self.tickets:\n ticket.delete()\n for question in self.questions:\n question.delete()\n self.student_session.delete()\n <function token>\n <function token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n 
self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], 
sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n 
self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n 
self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in 
sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n 
self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n\n\nclass ApiTestCase(TestCase):\n group: AcademyGroup\n student: Student\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 
60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = 
result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 
len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n 
self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = 
self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), 
errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n\n def test_authorized(self):\n response = self.authorize.post(token=self.student.id)\n result = self.assertResponseSuccess(response)\n self.assertEqual(response.cookies['student'].value, self.student.id)\n self.assertEqual(result['name'], self.student.name)\n self.assertEqual(result['group'], self.group.name)\n self.assertEqual(result['id'], self.student.id)\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], 
ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n 
student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n 
InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n 
self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n 
student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n <function token>\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n\n def test_authorized_invalid_params(self):\n response = self.authorize.post()\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.post(token=12345678)\n self.assertResponseError(response, errors.InvalidParameter('token'))\n response = self.authorize.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def 
test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n 
def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def 
test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = 
self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: 
len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n\n def setUp(self):\n super().setUp()\n self.authorize = ApiClient('/api/authorize')\n <function token>\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n <function token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n 
self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n 
self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n 
session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n 
super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n 
self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n <function token>\n <function token>\n\n def test_authorized_unknown_token(self):\n response = self.authorize.post(token=uuid_str())\n self.assertResponseError(response, errors.Unauthorized)\n <function token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n 
self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = 
result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], 
None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if 
ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n 
ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass TestAuthorize(ApiTestCase):\n authorize: ApiClient\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n 
self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in 
questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in 
questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == 
QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n 
self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_non_available(self):\n self.session.start_time = timezone.now() + self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n 
self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.not_available)\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n 
ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n 
self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, 
len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n 
self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n <function token>\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n 
self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = 
self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in 
self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), 
errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n <function token>\n\n def test_get_exams_available(self):\n result = self.assertResponseSuccess(self.get_exams.get())\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 1)\n user_session = result[0]\n self.assertEqual(user_session['started_at'], self.session.\n start_time.isoformat())\n self.assertEqual(user_session['duration'], self.session.duration.\n total_seconds() / 60)\n self.assertEqual(user_session['checked_in'], False)\n self.assertEqual(user_session['finished_at'], None)\n self.assertEqual(user_session['status'], ExamStatus.available.value)\n self.assertEqual(user_session['score'], None)\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n <function token>\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket 
in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n 
self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n 
self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n 
self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n 
), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exams = ApiClient('/api/exams', student=self.student)\n self.setup_exam_objects()\n <function token>\n <function token>\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n <function token>\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: 
List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n 
session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n 
self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, 
ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), 
errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n <function token>\n <function token>\n <function token>\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n <function token>\n\n def test_get_exams_unauthorized(self):\n self.get_exams.cookies = {}\n self.assertResponseError(self.get_exams.get(), errors.Unauthorized)\n response = self.get_exams.post()\n self.assertEqual(response.status_code, 405)\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = 
ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n 
self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = 
{}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n 
self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n <function token>\n <function token>\n <function token>\n\n def test_get_exams_check_in(self):\n self.student_session.started_at = timezone.now()\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['checked_in'], True)\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n <function token>\n <function token>\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n 
self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n 
self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 
405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, 
uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n <function token>\n <function token>\n <function token>\n <function token>\n\n def test_get_exams_submitted(self):\n now = timezone.now()\n self.student_session.started_at = timezone.now()\n self.student_session.finished_at = now\n self.student_session.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['finished_at'], now.isoformat())\n self.assertEqual(user_session['status'], ExamStatus.submitted)\n self.assertEqual(user_session['score'], None)\n <function token>\n <function token>\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n 
self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n 
session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n 
super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n 
self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def test_get_exams_score(self):\n for ticket in self.tickets:\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], sum(t.score for t in self.\n tickets))\n self.tickets[0].score = None\n self.tickets[0].save()\n result = self.assertResponseSuccess(self.get_exams.get())\n user_session = result[0]\n self.assertEqual(user_session['score'], None)\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = 
ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n 
self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n 
answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def 
test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamSessions(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], 
ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n 
self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n 
answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n 
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n 
self.assertEqual(len(questions), len(self.tickets))\n\n def test_get_exam_questions_not_available(self):\n self.session.start_time += self.session.duration\n self.session.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.not_available)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), 0)\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n 
ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = 
self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), 
errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n 
self.assertEqual(len(questions), len(self.tickets))\n <function token>\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n 
self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 
'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n\n def setUp(self):\n super().setUp()\n self.get_exam_questions = ApiClient('/api/tickets', student=self.\n student)\n self.setup_exam_objects()\n <function token>\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <function token>\n\n def 
test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = 
self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: 
len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n <function token>\n <function token>\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <function token>\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n 
ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n\n def test_get_exam_questions_invalid_params(self):\n self.assertResponseError(self.get_exam_questions.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.get_exam_questions.post(session_id=\n uuid_str()), errors.ExamNotFound)\n self.get_exam_questions.cookies = {}\n self.assertResponseError(self.get_exam_questions.post(session_id=\n self.student_session.id), errors.Unauthorized)\n response = self.get_exam_questions.get()\n self.assertEqual(response.status_code, 405)\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n 
super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = 
self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n <function token>\n <function token>\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <function token>\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n 
ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n\n def test_get_exam_questions_submitted_and_scored(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.score = 1.0\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], sum(t.score for t in self.tickets))\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n self.assertEqual(question['score'], ticket.score)\n <function token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = 
random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n 
self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n <function token>\n <function token>\n\n def test_get_exam_questions(self):\n self.assertFalse(self.student_session.check_in)\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n self.assertEqual([x['id'] for x in questions], [x.id for x in\n sorted(self.tickets, key=lambda x: x.question.stage)])\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket_question = ticket.question\n self.assertEqual(question.pop('id'), ticket.id)\n view = ticket_question.as_dict\n view.pop('id')\n self.assertEqual(question, view)\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <function token>\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n 
ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n <function token>\n <function token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in 
sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n 
self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n <function token>\n <function token>\n <function token>\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <function token>\n\n def test_get_exam_questions_submitted(self):\n self.student_session.finished_at = timezone.now()\n self.student_session.save()\n ANSWER = 'answer'\n for ticket in self.tickets:\n ticket.answer = ANSWER\n ticket.save()\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.submitted)\n self.assertEqual(result['score'], None)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n for question in questions:\n ticket = self.ticket_map[question['id']]\n ticket.refresh_from_db()\n answer = question.pop('answer')\n self.assertEqual(answer, ticket.answer)\n self.assertEqual(question['score'], None)\n <function token>\n <function token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n 
def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n 
self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n <function token>\n <function token>\n <function token>\n\n def test_get_exam_questions_already_checked_in(self):\n self.student_session.check_in = True\n checkin_date = self.student_session.started_at\n result = self.assertResponseSuccess(self.get_exam_questions.post(\n session_id=self.student_session.id))\n self.assertEqual(result['status'], ExamStatus.available)\n self.assertEqual(result['score'], None)\n self.student_session.refresh_from_db()\n self.assertTrue(self.student_session.check_in)\n self.assertEqual(self.student_session.started_at, checkin_date)\n questions = result['questions']\n self.assertIsInstance(questions, list)\n self.assertEqual(len(questions), len(self.tickets))\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n 
self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n 
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestGetExamTickets(ApiTestCase):\n get_exams: ApiClient\n session: ExamSession\n student_session: UserSession\n questions: List[Question]\n tickets: List[ExamTicket]\n ticket_map: Dict[str, ExamTicket]\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n 
self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n 
self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n\n def test_submit_partial_answer_errors(self):\n ANSWER = 'answer'\n answers = {self.tickets[0].id: 
len(self.tickets[0].question.options\n ), self.tickets[1].id: 0, self.tickets[2].id: ANSWER, uuid_str(\n ): ANSWER, (self.tickets[2].id + 1): ANSWER}\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(self.tickets[0].answer)\n self.assertIsNone(self.tickets[0].answered_at)\n self.assertIsNone(self.tickets[1].answer)\n self.assertIsNone(self.tickets[1].answered_at)\n self.assertEqual(self.tickets[2].answer, ANSWER)\n self.assertIsNotNone(self.tickets[2].answered_at)\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n\n def test_submit_without_any_answer(self):\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers={}))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n self.assertIsNone(ticket.answered_at)\n self.assertIsNone(ticket.answer)\n <function token>\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), 
errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestSubmitExam(ApiTestCase):\n\n def setUp(self):\n super().setUp()\n self.submit_exam = ApiClient('/api/submit', student=self.student)\n self.setup_exam_objects()\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n <function token>\n <function token>\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n 
self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestSubmitExam(ApiTestCase):\n <function token>\n\n def tearDown(self):\n self.teardown_exam_objects()\n super().tearDown()\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n <function token>\n <function token>\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += 
self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestSubmitExam(ApiTestCase):\n <function token>\n <function token>\n\n def test_submit_exam(self):\n answers = {}\n ANSWER = 'answer'\n for ticket in self.tickets:\n if ticket.question.type == QuestionType.single:\n answers[ticket.id] = random.randint(0, len(ticket.question.\n options) - 1)\n elif ticket.question.type == QuestionType.multi:\n answers[ticket.id] = random.sample(list(range(0, len(ticket\n .question.options))), k=random.randint(0, len(ticket.\n question.options)))\n else:\n answers[ticket.id] = ANSWER\n result = self.assertResponseSuccess(self.submit_exam.post(\n session_id=self.student_session.id, answers=answers))\n self.assertEqual(result, True)\n self.student_session.refresh_from_db()\n self.assertEqual(self.student_session.status, ExamStatus.submitted)\n for ticket in self.tickets:\n ticket.refresh_from_db()\n if ticket.question.type == QuestionType.single:\n self.assertEqual(ticket.answer, ticket.question.options[\n answers[ticket.id]])\n elif ticket.question.type == QuestionType.multi:\n self.assertEqual(ticket.answer, ';'.join([ticket.question.\n options[x] for x in sorted(answers[ticket.id])]))\n self.assertIsNotNone(ticket.answered_at)\n <function token>\n <function token>\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n 
self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestSubmitExam(ApiTestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def test_submit_errors(self):\n self.assertResponseError(self.submit_exam.post(), errors.\n InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=123),\n errors.InvalidParameter('session_id'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n )), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers=[]), errors.InvalidParameter('answers'))\n self.assertResponseError(self.submit_exam.post(session_id=uuid_str(\n ), answers={}), errors.ExamNotFound)\n self.session.start_time += self.session.duration\n self.session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n self.student_session.start_time = timezone.now()\n self.student_session.save()\n self.assertResponseError(self.submit_exam.post(session_id=self.\n student_session.id, answers={}), errors.ExamNotAvailable)\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass TestSubmitExam(ApiTestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
9,790 |
1fda8274024bdf74e7fbd4ac4a27d6cfe6032a13
|
from distutils.core import setup
setup(name='greeker',
version='0.3.2-git',
description="scrambles nouns in an XML document to produce a specimen for layout testing",
author="Brian Tingle",
author_email="[email protected]",
url="http://tingletech.github.com/greeker.py/",
install_requires=["inflect>=0.2.1", "lxml>=2.3.2", "nltk>=2.0.1rc2-git", "numpy", "argparse"],
py_modules=['greeker'],
scripts=['greeker.py'],
)
|
[
"from distutils.core import setup\nsetup(name='greeker',\n version='0.3.2-git',\n description=\"scrambles nouns in an XML document to produce a specimen for layout testing\",\n author=\"Brian Tingle\",\n author_email=\"[email protected]\",\n url=\"http://tingletech.github.com/greeker.py/\",\n install_requires=[\"inflect>=0.2.1\", \"lxml>=2.3.2\", \"nltk>=2.0.1rc2-git\", \"numpy\", \"argparse\"],\n py_modules=['greeker'],\n scripts=['greeker.py'],\n )\n",
"from distutils.core import setup\nsetup(name='greeker', version='0.3.2-git', description=\n 'scrambles nouns in an XML document to produce a specimen for layout testing'\n , author='Brian Tingle', author_email=\n '[email protected]', url=\n 'http://tingletech.github.com/greeker.py/', install_requires=[\n 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy',\n 'argparse'], py_modules=['greeker'], scripts=['greeker.py'])\n",
"<import token>\nsetup(name='greeker', version='0.3.2-git', description=\n 'scrambles nouns in an XML document to produce a specimen for layout testing'\n , author='Brian Tingle', author_email=\n '[email protected]', url=\n 'http://tingletech.github.com/greeker.py/', install_requires=[\n 'inflect>=0.2.1', 'lxml>=2.3.2', 'nltk>=2.0.1rc2-git', 'numpy',\n 'argparse'], py_modules=['greeker'], scripts=['greeker.py'])\n",
"<import token>\n<code token>\n"
] | false |
9,791 |
75217256d88c32ed1c502bc104c30092bf74382d
|
# Find sum/count of Prime digits in a number
|
[
"# Find sum/count of Prime digits in a number",
""
] | false |
9,792 |
acd0b9019ef413699b47ecb2b66a0980cf3aa81f
|
from cudasim.ParsedModel import ParsedModel
import re
import copy
class Writer:
def __init__(self):
pass
# replace the species and parameters recursively
@staticmethod
def rep(string, find, replace):
ex = find + "[^0-9]"
while re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + " " + string[res.end() - 1:]
ex = find + "$"
if re.search(ex, string) is not None:
res = re.search(ex, string)
string = string[0:res.start()] + replace + " " + string[res.end():]
return string
def categorise_variables(self):
# form a list of the species, and parameters which are set by rate rules
model = self.parser.parsedModel
rule_params = []
rule_values = []
constant_params = []
constant_values = []
for i in range(len(model.listOfParameter)):
is_constant = True
if not model.listOfParameter[i].getConstant():
for k in range(len(model.listOfRules)):
if model.listOfRules[k].isRate() and model.ruleVariable[k] == model.parameterId[i]:
rule_params.append(model.parameterId[i])
rule_values.append(str(model.parameter[i]))
is_constant = False
if is_constant:
constant_params.append(model.parameterId[i])
constant_values.append(str(model.parameter[i]))
species_list = copy.copy(model.speciesId)
species_list.extend(rule_params)
species_values = map(lambda x: str(x), model.initValues)
species_values.extend(rule_values)
return species_list, constant_params, species_values, constant_values
|
[
"from cudasim.ParsedModel import ParsedModel\nimport re\nimport copy\n\nclass Writer:\n\n def __init__(self):\n pass\n\n # replace the species and parameters recursively\n @staticmethod\n def rep(string, find, replace):\n ex = find + \"[^0-9]\"\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + \" \" + string[res.end() - 1:]\n\n ex = find + \"$\"\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + \" \" + string[res.end():]\n\n return string\n\n def categorise_variables(self):\n # form a list of the species, and parameters which are set by rate rules\n model = self.parser.parsedModel\n\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n\n return species_list, constant_params, species_values, constant_values\n",
"from cudasim.ParsedModel import ParsedModel\nimport re\nimport copy\n\n\nclass Writer:\n\n def __init__(self):\n pass\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n\n def categorise_variables(self):\n model = self.parser.parsedModel\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k\n ] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n return species_list, constant_params, species_values, constant_values\n",
"<import token>\n\n\nclass Writer:\n\n def __init__(self):\n pass\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n\n def categorise_variables(self):\n model = self.parser.parsedModel\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k\n ] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n return species_list, constant_params, species_values, constant_values\n",
"<import token>\n\n\nclass Writer:\n <function token>\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n\n def categorise_variables(self):\n model = self.parser.parsedModel\n rule_params = []\n rule_values = []\n constant_params = []\n constant_values = []\n for i in range(len(model.listOfParameter)):\n is_constant = True\n if not model.listOfParameter[i].getConstant():\n for k in range(len(model.listOfRules)):\n if model.listOfRules[k].isRate() and model.ruleVariable[k\n ] == model.parameterId[i]:\n rule_params.append(model.parameterId[i])\n rule_values.append(str(model.parameter[i]))\n is_constant = False\n if is_constant:\n constant_params.append(model.parameterId[i])\n constant_values.append(str(model.parameter[i]))\n species_list = copy.copy(model.speciesId)\n species_list.extend(rule_params)\n species_values = map(lambda x: str(x), model.initValues)\n species_values.extend(rule_values)\n return species_list, constant_params, species_values, constant_values\n",
"<import token>\n\n\nclass Writer:\n <function token>\n\n @staticmethod\n def rep(string, find, replace):\n ex = find + '[^0-9]'\n while re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end\n () - 1:]\n ex = find + '$'\n if re.search(ex, string) is not None:\n res = re.search(ex, string)\n string = string[0:res.start()] + replace + ' ' + string[res.end():]\n return string\n <function token>\n",
"<import token>\n\n\nclass Writer:\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
9,793 |
c9b62328a463fd38f3dbd1e7b5e1990f7eec1dba
|
from django.shortcuts import render
from django.http import HttpResponse
def view1(request):
return HttpResponse(" Hey..,This is the first view using HttpResponce!")
def view2(request):
context={"tag_var":"tag_var"}
return render(request,"new.html",context)
# Create your views here.
|
[
"from django.shortcuts import render\nfrom django.http import HttpResponse\ndef view1(request):\n return HttpResponse(\" Hey..,This is the first view using HttpResponce!\")\ndef view2(request):\n context={\"tag_var\":\"tag_var\"}\n return render(request,\"new.html\",context)\n# Create your views here.\n",
"from django.shortcuts import render\nfrom django.http import HttpResponse\n\n\ndef view1(request):\n return HttpResponse(' Hey..,This is the first view using HttpResponce!')\n\n\ndef view2(request):\n context = {'tag_var': 'tag_var'}\n return render(request, 'new.html', context)\n",
"<import token>\n\n\ndef view1(request):\n return HttpResponse(' Hey..,This is the first view using HttpResponce!')\n\n\ndef view2(request):\n context = {'tag_var': 'tag_var'}\n return render(request, 'new.html', context)\n",
"<import token>\n<function token>\n\n\ndef view2(request):\n context = {'tag_var': 'tag_var'}\n return render(request, 'new.html', context)\n",
"<import token>\n<function token>\n<function token>\n"
] | false |
9,794 |
6cd250b3bffd87657ec7cc28eaffe817c6d9f73f
|
# Generated by Django 2.0.3 on 2018-04-30 16:25
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('threads', '0007_auto_20180430_1617'),
]
operations = [
migrations.AlterField(
model_name='thread',
name='last_activity',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
|
[
"# Generated by Django 2.0.3 on 2018-04-30 16:25\n\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('threads', '0007_auto_20180430_1617'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='thread',\n name='last_activity',\n field=models.DateTimeField(default=django.utils.timezone.now),\n ),\n ]\n",
"from django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n dependencies = [('threads', '0007_auto_20180430_1617')]\n operations = [migrations.AlterField(model_name='thread', name=\n 'last_activity', field=models.DateTimeField(default=django.utils.\n timezone.now))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('threads', '0007_auto_20180430_1617')]\n operations = [migrations.AlterField(model_name='thread', name=\n 'last_activity', field=models.DateTimeField(default=django.utils.\n timezone.now))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
9,795 |
00099cab0c816c76fc0fa94d7905175feb6919cf
|
import django.dispatch
property_viewed = django.dispatch.Signal(providing_args=["property","user", "request", "response"])
|
[
"import django.dispatch\n\nproperty_viewed = django.dispatch.Signal(providing_args=[\"property\",\"user\", \"request\", \"response\"])",
"import django.dispatch\nproperty_viewed = django.dispatch.Signal(providing_args=['property', 'user',\n 'request', 'response'])\n",
"<import token>\nproperty_viewed = django.dispatch.Signal(providing_args=['property', 'user',\n 'request', 'response'])\n",
"<import token>\n<assignment token>\n"
] | false |
9,796 |
8de36400f21bfb4e24703d5a65471a961e1afddc
|
#coding=utf-8
from selenium import webdriver
wd=webdriver.Firefox()
wd.get('https://www.baidu.com/')
wd.find_element_by_id('kw').send_keys(u'哈哈')
wd.quit()
|
[
"#coding=utf-8\n\nfrom selenium import webdriver\n\nwd=webdriver.Firefox()\nwd.get('https://www.baidu.com/')\nwd.find_element_by_id('kw').send_keys(u'哈哈')\n\nwd.quit()\n",
"from selenium import webdriver\nwd = webdriver.Firefox()\nwd.get('https://www.baidu.com/')\nwd.find_element_by_id('kw').send_keys(u'哈哈')\nwd.quit()\n",
"<import token>\nwd = webdriver.Firefox()\nwd.get('https://www.baidu.com/')\nwd.find_element_by_id('kw').send_keys(u'哈哈')\nwd.quit()\n",
"<import token>\n<assignment token>\nwd.get('https://www.baidu.com/')\nwd.find_element_by_id('kw').send_keys(u'哈哈')\nwd.quit()\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
9,797 |
a52f009a755b45f8ed653a4a0385b1eb667f2318
|
__author__ = 'changwoncheo'
# -*- coding: utf-8 -*-
import threading
import logging
logging.basicConfig(filename='crawl2.log',level=logging.DEBUG)
class NoParsingFilter(logging.Filter):
def filter(self, record):
msg = record.getMessage()
return not ('Starting' in msg or 'GET' in msg)
logger = logging.getLogger('Crawler')
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.CRITICAL) #로깅 되지 않도록
import os
import re
reobj_album = re.compile('\'(.*)\'')#앨범 정규식 (javascript('숫자'))
reobj_djIndex = re.compile(',\'(.*)\'')#앨범 정규식 (javascript('숫자','숫자'))
reobj_filename = re.compile('/(\w*[.]\w*)$')#파일이름 정규식
category = {102:'발라드',103:'댄스',104:'랩_합합',105:'R&B_Soul',106:'록',107:'일렉트로니카',108:'트로트',109:'포크',110:'인디음악'}
def tapNewlineStrip(str):
return str.encode('utf-8').replace('\n','').replace('\t','').decode('utf-8')
def writeJson(fileName,dict):
import json
print dict
with open(fileName, 'w') as outfile:
json.dump(dict, outfile, ensure_ascii = False, encoding = 'utf-8')
|
[
"__author__ = 'changwoncheo'\n# -*- coding: utf-8 -*-\nimport threading\nimport logging\nlogging.basicConfig(filename='crawl2.log',level=logging.DEBUG)\nclass NoParsingFilter(logging.Filter):\n def filter(self, record):\n msg = record.getMessage()\n return not ('Starting' in msg or 'GET' in msg)\nlogger = logging.getLogger('Crawler')\nrequests_log = logging.getLogger(\"requests\")\nrequests_log.setLevel(logging.CRITICAL) #로깅 되지 않도록\n\n\nimport os\nimport re\nreobj_album = re.compile('\\'(.*)\\'')#앨범 정규식 (javascript('숫자'))\nreobj_djIndex = re.compile(',\\'(.*)\\'')#앨범 정규식 (javascript('숫자','숫자'))\nreobj_filename = re.compile('/(\\w*[.]\\w*)$')#파일이름 정규식\ncategory = {102:'발라드',103:'댄스',104:'랩_합합',105:'R&B_Soul',106:'록',107:'일렉트로니카',108:'트로트',109:'포크',110:'인디음악'}\ndef tapNewlineStrip(str):\n return str.encode('utf-8').replace('\\n','').replace('\\t','').decode('utf-8')\n\ndef writeJson(fileName,dict):\n import json\n print dict\n with open(fileName, 'w') as outfile:\n json.dump(dict, outfile, ensure_ascii = False, encoding = 'utf-8')\n\n"
] | true |
9,798 |
93e534e8d425510b59310dcbfc5bca9cc32f245e
|
import sys
import random
#import matplotlib.pyplot as plt
import numpy as np
import time
class Waterfilling:
"""
initializes x and r with optimal flow allocations
and link fair share rates for traffic matrix routes and link
capacities c, and level with number of levels
after running the waterfilling algorithm. note
that if sum of flow allocations at a link is less than capacity
then fair share of link is float('inf').
not that routes and c must be initialized before calling this.
"""
def __init__(self, routes, c, log, prec_library):
#log = True
#print "Waterfilling"
#print mpmath.mp
(self.num_flows, self.num_links) = routes.shape
self.levels = np.ones((self.num_links, 1)) * float('inf')
self.prec_library = prec_library
eps = prec_library.eps1
weights = np.ones((self.num_flows,1))
#print("weights", weights.shape, weights)
#print("routes", routes.shape, routes)
#self.r = np.ones((self.num_links,1)) * mpf_inf
#self.x = np.ones((self.num_flows,1)) * mpf_inf
x = np.zeros((self.num_flows,1))
active_flows = np.ones((self.num_flows, 1), dtype=bool)
rem_cap = c #np.ones((self.num_links, 1)) * prec_library.mpf_one
# for i in range(self.num_links):
# rem_cap[i] = prec_library.mpf(c[i,0])
self.max_level = 0
num_active_flows = np.count_nonzero(active_flows, axis=0)
#print(num_active_flows,"flows left")
while num_active_flows > 0:
# number of rem flows on all links
link_weights = np.dot(routes.T, weights)
assert(rem_cap.shape == link_weights.shape)
try:
fair_shares = np.where(link_weights>0, rem_cap/link_weights, float('inf'))
except:
pass
#print("link_weights", link_weights)
#print("rem_cap", rem_cap)
#print("fair_shares", fair_shares)
fair_shares.reshape(self.num_links, 1)
bl = np.argmin(fair_shares)
#print ("bl",type(bl),bl)
inc = float(fair_shares[bl, 0])
assert(inc < float('inf'))
# increase level, only when link with smallest fair share rate
# has a rate larger than last one, handles the following example
# two links, each cap 10.0, each has one flow, and none in common
# each link identified in different iterations of this loop
if self.max_level == 0 or inc > eps: self.max_level += 1
x = np.where(active_flows, x + inc * weights, x)
if log:
print "In round",self.max_level,\
" link", bl, "has smallest fair share", inc, "b/s",\
"Next rate increase is", inc, " (type ", type(inc), ") cuz of bl ",\
bl, " with rem_cap ", rem_cap[bl,0], " b/s",\
"and ", link_weights[bl,0] , " of the total ",\
num_active_flows, " remaining flows"
rem_cap = rem_cap - inc * link_weights
neg_cap = list(np.where(rem_cap < -1e7)[0]) # for each (aka only) column
if (len(neg_cap) > 0):
print >> sys.stderr, "warning! in watefilling hp links with neg. rem_cap ", neg_cap
bf = np.where(routes[:,bl] > 0)[0]
active_flows[bf] = 0
num_active_flows = np.count_nonzero(active_flows, axis=0)
#print(num_active_flows,"flows left")
weights[bf] = 0
self.levels[bl] = self.max_level
# get max. rate at each link
r = np.ones((self.num_links,1)) * float('inf')
for e in range(self.num_links):
flows = np.nonzero(routes[:, e])[0]
if len(flows) > 0:
sum_demands = sum(x[flows])[0]
cap = c[e,0]
diff = abs(sum_demands - cap)
if (sum_demands > cap or diff < eps):
r[e] = max(x[flows])
print "link",e,"has rate", r[e]
self.level = self.max_level
self.x = x
self.r = r
self.bottleneck_links_arr = np.where(self.r < float('inf'))[0]
self.bottleneck_links = {}
self.non_bottleneck_links = {}
self.sat_flows = {}
self.unsat_flows = {}
# class Eps:
# def __init__(self):
# self.eps1 = 1e-7
# pass
# def main():
# for num_flows in [10, 100, 1000, 10000]:
# start = time.time()
# routes = np.ones((num_flows, 2))
# routes[:, 1] = 0
# routes[0:2, 1] = 1
# routes[0, 0] = 0
# c = np.ones((2,1))
# wf = Waterfilling(routes, c, True, Eps())
# stop = time.time()
# elapsed = stop - start
# print("num_flows", num_flows, "elapsed", elapsed,"s")
# #print wf.x
# #print wf.r
# #print wf.level
# pass
# main()
|
[
"import sys\nimport random\n#import matplotlib.pyplot as plt\nimport numpy as np\nimport time\n\nclass Waterfilling:\n \"\"\"\n initializes x and r with optimal flow allocations\n and link fair share rates for traffic matrix routes and link\n capacities c, and level with number of levels\n after running the waterfilling algorithm. note\n that if sum of flow allocations at a link is less than capacity\n then fair share of link is float('inf').\n not that routes and c must be initialized before calling this.\n \"\"\" \n\n def __init__(self, routes, c, log, prec_library):\n #log = True\n #print \"Waterfilling\"\n #print mpmath.mp\n \n (self.num_flows, self.num_links) = routes.shape\n self.levels = np.ones((self.num_links, 1)) * float('inf')\n self.prec_library = prec_library\n \n eps = prec_library.eps1\n weights = np.ones((self.num_flows,1))\n #print(\"weights\", weights.shape, weights)\n #print(\"routes\", routes.shape, routes)\n #self.r = np.ones((self.num_links,1)) * mpf_inf\n #self.x = np.ones((self.num_flows,1)) * mpf_inf \n\n x = np.zeros((self.num_flows,1))\n active_flows = np.ones((self.num_flows, 1), dtype=bool)\n\n \n rem_cap = c #np.ones((self.num_links, 1)) * prec_library.mpf_one\n # for i in range(self.num_links):\n # rem_cap[i] = prec_library.mpf(c[i,0])\n\n\n self.max_level = 0\n num_active_flows = np.count_nonzero(active_flows, axis=0)\n #print(num_active_flows,\"flows left\")\n\n while num_active_flows > 0:\n \n # number of rem flows on all links\n link_weights = np.dot(routes.T, weights)\n assert(rem_cap.shape == link_weights.shape)\n try:\n fair_shares = np.where(link_weights>0, rem_cap/link_weights, float('inf'))\n except:\n pass\n #print(\"link_weights\", link_weights)\n #print(\"rem_cap\", rem_cap)\n #print(\"fair_shares\", fair_shares)\n fair_shares.reshape(self.num_links, 1)\n bl = np.argmin(fair_shares)\n #print (\"bl\",type(bl),bl)\n inc = float(fair_shares[bl, 0])\n assert(inc < float('inf'))\n\n # increase level, only when link with 
smallest fair share rate\n # has a rate larger than last one, handles the following example\n # two links, each cap 10.0, each has one flow, and none in common\n # each link identified in different iterations of this loop\n if self.max_level == 0 or inc > eps: self.max_level += 1\n x = np.where(active_flows, x + inc * weights, x)\n\n if log:\n print \"In round\",self.max_level,\\\n \" link\", bl, \"has smallest fair share\", inc, \"b/s\",\\\n \"Next rate increase is\", inc, \" (type \", type(inc), \") cuz of bl \",\\\n bl, \" with rem_cap \", rem_cap[bl,0], \" b/s\",\\\n \"and \", link_weights[bl,0] , \" of the total \",\\\n num_active_flows, \" remaining flows\"\n rem_cap = rem_cap - inc * link_weights\n neg_cap = list(np.where(rem_cap < -1e7)[0]) # for each (aka only) column \n if (len(neg_cap) > 0):\n print >> sys.stderr, \"warning! in watefilling hp links with neg. rem_cap \", neg_cap\n bf = np.where(routes[:,bl] > 0)[0]\n active_flows[bf] = 0\n num_active_flows = np.count_nonzero(active_flows, axis=0)\n #print(num_active_flows,\"flows left\")\n weights[bf] = 0\n self.levels[bl] = self.max_level\n \n # get max. 
rate at each link\n r = np.ones((self.num_links,1)) * float('inf')\n for e in range(self.num_links):\n flows = np.nonzero(routes[:, e])[0]\n if len(flows) > 0:\n sum_demands = sum(x[flows])[0]\n cap = c[e,0]\n diff = abs(sum_demands - cap)\n if (sum_demands > cap or diff < eps):\n r[e] = max(x[flows])\n print \"link\",e,\"has rate\", r[e]\n\n self.level = self.max_level\n self.x = x\n self.r = r\n\n self.bottleneck_links_arr = np.where(self.r < float('inf'))[0]\n self.bottleneck_links = {}\n self.non_bottleneck_links = {}\n\n self.sat_flows = {}\n self.unsat_flows = {}\n\n# class Eps:\n# def __init__(self):\n# self.eps1 = 1e-7\n# pass\n\n# def main():\n# for num_flows in [10, 100, 1000, 10000]:\n# start = time.time()\n# routes = np.ones((num_flows, 2))\n# routes[:, 1] = 0\n# routes[0:2, 1] = 1\n# routes[0, 0] = 0\n# c = np.ones((2,1))\n \n# wf = Waterfilling(routes, c, True, Eps())\n# stop = time.time()\n# elapsed = stop - start\n# print(\"num_flows\", num_flows, \"elapsed\", elapsed,\"s\")\n# #print wf.x\n# #print wf.r\n# #print wf.level\n# pass\n\n# main()\n"
] | true |
9,799 |
c4c24c36fe0afba61f8046055690f0c36df7098c
|
# Developed by : Jays Patel (cyberthreatinfo.ca)
# This script is use to find the python Composer packages vulnerabilities from linux machine and python source project.
import time
import glob2
import random
import os.path
from os import path
import ast
import sys
import commands
import re
import requests
from pkg_resources import parse_version
import json
import argparse
from tqdm import tqdm
from datetime import datetime
class getComposerVulnerabilities():
def __init__(self, reportPath, project, targetFolder, owner):
self.reportPath = reportPath
self.sourcefolder = targetFolder
self.project = project
if not path.exists("server.config"):
print "[ INFO ] server configuration json file not found in current directory"
sys.exit(1)
with open('server.config') as f:
configData = json.load(f)
self.tokenId = configData['tokenId']
self.server = configData['server']
self.port = configData['port']
self.protocol = configData['protocol']
try:
url = "%s://%s:%s/api/checkToken/%s" % (self.protocol, self.server, self.port, self.tokenId)
response = requests.request("GET", url)
tokenData = response.text
tokenData = json.loads(tokenData)
if tokenData['result']:
print "[ OK ] Token valid, start scanning...."
else:
print "[ INFO ] Token invalid or expire, please login on portal and verify the TokenId"
sys.exit(1)
except:
print "[ OK ] Server connection error, Please check internet connectivity"
sys.exit(1)
self.results = {}
self.results['header'] = {}
now = datetime.now()
self.report_name = now.strftime("%d-%m-%Y_%H:%M:%S")
self.report_path = reportPath
self.results['header']['Date'] = self.report_name
self.results['header']['Project'] = self.project
self.results['header']['Owner'] = owner
self.results['header']['Target'] = "source"
self.results['header']['docker'] = "False"
self.vuln_depe = []
self.vuln_found = []
self.testedWith = []
self.dependanciesCount = []
def gtEq(self, vers1, mVers):
if parse_version(mVers) >= parse_version(vers1):
return True
else:
return False
def gt(self, vers1, mVers):
if parse_version(mVers) > parse_version(vers1):
return True
else:
return False
def ltEq(self, vers1, mVers):
if parse_version(mVers) <= parse_version(vers1):
return True
else:
return False
def lt(self, vers1, mVers):
if parse_version(mVers) < parse_version(vers1):
return True
else:
return False
def eq(self, vers1, mVers):
if parse_version(mVers) == parse_version(vers1):
return True
else:
return False
def getLatestVersion(self, product, vendor, mVers):
response = requests.get('https://repo.packagist.org/p/%s/%s.json' % (vendor, product))
data = response.text
data = json.loads(data)
kData = []
for k,v in data['packages']['%s/%s' % (vendor, product)].items():
if re.findall(r'^v%s' % mVers, str(k)):
value = re.findall(r'%s' % mVers, str(k))[0]
kData.append(k)
max = "0.0"
for v in kData:
if parse_version(v) > parse_version(max):
max = v
return max
def getMatchVersionLists(self, product, vendor, version):
response = requests.get('https://semver.mwl.be/packages/%s/%s/match?constraint=%s&minimum-stability=stable' % (vendor, product, version))
data = response.text
data = json.loads(data)
return data
def maxValue(self, mVersions):
ver1 = '0.0'
for ver in mVersions:
if parse_version(ver) > parse_version(ver1):
ver1 = ver
return ver1
def matchVer(self, mVersions, product, vendor, cve_id, versions, reference, vuln_name, vectorString, baseScore, recommendation, pub_date, severity, dependancy, patch, cwe_text):
mVersions = self.getMatchVersionLists(product, vendor, mVersions)
mVer = self.maxValue(mVersions)
if severity.lower() == "medium" or severity.lower() == "moderate":
severity = "Medium"
elif severity.lower() == "high":
severity = "High"
elif severity.lower() == "low":
severity = "Low"
elif severity.lower() == "critical":
severity = "Critical"
if not patch:
patch = versions
for vers in versions.split(","):
if re.findall(r'\[.*:.*\]', str(vers)):
vers1 = re.findall(r'\[(.*):', str(vers))[0]
vers2 = re.findall(r':(.*)\]', str(vers))[0]
if self.gtEq(vers1, mVer) and self.ltEq(vers2, mVer):
res = {}
if severity not in self.results['Issues']:
self.results['Issues'][severity] = {}
self.results['Issues'][severity]['data'] = []
self.results['Issues'][severity]['header'] = []
res1 = {}
res1['CVEID'] = str(cve_id)
res1['Product'] = str(product)
res1['CWE'] = str(cwe_text)
res1['Severity'] = str(severity)
res['Product'] = str(product)
res['Vendor'] = str(vendor)
res['Severity'] = str(severity)
res['CVEID'] = str(cve_id)
res['Vector String'] = str(vectorString)
res['Vulnerability Name'] = str(vuln_name)
res['Patched Version'] = str(patch)
res['Recommendation'] = str(recommendation)
res['Reference'] = str(reference)
res['Publish Date'] = str(pub_date)
res['Introduced Through'] = str(dependancy)
res['Installed Version'] = str(mVer)
res['CWE'] = str(cwe_text)
if res not in self.results['Issues'][severity]['data']:
self.results['Issues'][severity]['data'].append(res)
self.results['Issues'][severity]['header'].append(res1)
if severity.lower() == "medium" or severity.lower() == "moderate":
self.med.append("Medium")
if severity.lower() == "high":
self.hig.append("High")
if severity.lower() == "low":
self.low.append("Low")
if severity.lower() == "critical":
self.cri.append("Critical")
self.vuln_found.append(product)
if product not in self.vuln_depe:
self.vuln_depe.append(product)
elif re.findall(r'\(.*:.*\]', str(vers)):
vers1 = re.findall(r'\((.*):', str(vers))[0]
vers2 = re.findall(r':(.*)\]', str(vers))[0]
if self.gt(vers1, mVer) and self.ltEq(vers2, mVer):
res = {}
if severity not in self.results['Issues']:
self.results['Issues'][severity] = {}
self.results['Issues'][severity]['data'] = []
self.results['Issues'][severity]['header'] = []
res1 = {}
res1['CVEID'] = str(cve_id)
res1['Product'] = str(product)
res1['CWE'] = str(cwe_text)
res1['Severity'] = str(severity)
res['Product'] = str(product)
res['Vendor'] = str(vendor)
res['Severity'] = str(severity)
res['CVEID'] = str(cve_id)
res['Vector String'] = str(vectorString)
res['Vulnerability Name'] = str(vuln_name)
res['Patched Version'] = str(patch)
res['Recommendation'] = str(recommendation)
res['Reference'] = str(reference)
res['Publish Date'] = str(pub_date)
res['Introduced Through'] = str(dependancy)
res['Installed Version'] = str(mVer)
res['CWE'] = str(cwe_text)
if res not in self.results['Issues'][severity]['data']:
self.results['Issues'][severity].append(res)
if severity.lower() == "medium" or severity.lower() == "moderate":
self.med.append("Medium")
if severity.lower() == "high":
self.hig.append("High")
if severity.lower() == "low":
self.low.append("Low")
if severity.lower() == "critical":
self.cri.append("Critical")
self.vuln_found.append(product)
if product not in self.vuln_depe:
self.vuln_depe.append(product)
elif re.findall(r'\[.*:.*\)', str(vers)):
vers1 = re.findall(r'\[(.*):', str(vers))[0]
vers2 = re.findall(r':(.*)\)', str(vers))[0]
if self.gtEq(vers1, mVer) and self.lt(vers2, mVer):
res = {}
if severity not in self.results['Issues']:
self.results['Issues'][severity] = {}
self.results['Issues'][severity]['data'] = []
self.results['Issues'][severity]['header'] = []
res1 = {}
res1['CVEID'] = str(cve_id)
res1['Product'] = str(product)
res1['CWE'] = str(cwe_text)
res1['Severity'] = str(severity)
res['Product'] = str(product)
res['Vendor'] = str(vendor)
res['Severity'] = str(severity)
res['CVEID'] = str(cve_id)
res['Vector String'] = str(vectorString)
res['Vulnerability Name'] = str(vuln_name)
res['Patched Version'] = str(patch)
res['Recommendation'] = str(recommendation)
res['Reference'] = str(reference)
res['Publish Date'] = str(pub_date)
res['Introduced Through'] = str(dependancy)
res['Installed Version'] = str(mVer)
res['CWE'] = str(cwe_text)
if res not in self.results['Issues'][severity]['data']:
self.results['Issues'][severity]['data'].append(res)
self.results['Issues'][severity]['header'].append(res1)
if severity.lower() == "medium" or severity.lower() == "moderate":
self.med.append("Medium")
if severity.lower() == "high":
self.hig.append("High")
if severity.lower() == "low":
self.low.append("Low")
if severity.lower() == "critical":
self.cri.append("Critical")
self.vuln_found.append(product)
if product not in self.vuln_depe:
self.vuln_depe.append(product)
elif re.findall(r'\(.*:.*\)', str(vers)):
vers1 = re.findall(r'\((.*):', str(vers))[0]
vers2 = re.findall(r':(.*)\)', str(vers))[0]
if self.gt(vers1, mVer) and self.lt(vers2, mVer):
res = {}
if severity not in self.results['Issues']:
self.results['Issues'][severity] = {}
self.results['Issues'][severity]['data'] = []
self.results['Issues'][severity]['header'] = []
res1 = {}
res1['CVEID'] = str(cve_id)
res1['Product'] = str(product)
res1['CWE'] = str(cwe_text)
res1['Severity'] = str(severity)
res['Product'] = str(product)
res['Vendor'] = str(vendor)
res['Severity'] = str(severity)
res['CVEID'] = str(cve_id)
res['Vector String'] = str(vectorString)
res['Vulnerability Name'] = str(vuln_name)
res['Patched Version'] = str(patch)
res['Recommendation'] = str(recommendation)
res['Reference'] = str(reference)
res['Publish Date'] = str(pub_date)
res['Introduced Through'] = str(dependancy)
res['Installed Version'] = str(mVer)
res['CWE'] = str(cwe_text)
if res not in self.results['Issues'][severity]['data']:
self.results['Issues'][severity]['data'].append(res)
self.results['Issues'][severity]['header'].append(res1)
if severity.lower() == "medium" or severity.lower() == "moderate":
self.med.append("Medium")
if severity.lower() == "high":
self.hig.append("High")
if severity.lower() == "low":
self.low.append("Low")
if severity.lower() == "critical":
self.cri.append("Critical")
self.vuln_found.append(product)
if product not in self.vuln_depe:
self.vuln_depe.append(product)
elif re.findall(r'\(.*:.*\)', str(vers)):
vers1 = re.findall(r'\((.*):', str(vers))[0]
vers2 = re.findall(r':(.*)\)', str(vers))[0]
if self.gt(vers1, mVer) and self.lt(vers2, mVer):
res = {}
if severity not in self.results['Issues']:
self.results['Issues'][severity] = {}
self.results['Issues'][severity]['data'] = []
self.results['Issues'][severity]['header'] = []
res1 = {}
res1['CVEID'] = str(cve_id)
res1['Product'] = str(product)
res1['CWE'] = str(cwe_text)
res1['Severity'] = str(severity)
res['Product'] = str(product)
res['Vendor'] = str(vendor)
res['Severity'] = str(severity)
res['CVEID'] = str(cve_id)
res['Vector String'] = str(vectorString)
res['Vulnerability Name'] = str(vuln_name)
res['Patched Version'] = str(patch)
res['Recommendation'] = str(recommendation)
res['Reference'] = str(reference)
res['Publish Date'] = str(pub_date)
res['Introduced Through'] = str(dependancy)
res['Installed Version'] = str(mVer)
res['CWE'] = str(cwe_text)
if res not in self.results['Issues'][severity]['data']:
self.results['Issues'][severity]['data'].append(res)
self.results['Issues'][severity]['header'].append(res1)
if severity.lower() == "medium" or severity.lower() == "moderate":
self.med.append("Medium")
if severity.lower() == "high":
self.hig.append("High")
if severity.lower() == "low":
self.low.append("Low")
if severity.lower() == "critical":
self.cri.append("Critical")
self.vuln_found.append(product)
if product not in self.vuln_depe:
self.vuln_depe.append(product)
else:
vers1 = str(vers)
if self.eq(vers1, mVer):
res = {}
if severity not in self.results['Issues']:
self.results['Issues'][severity] = {}
self.results['Issues'][severity]['data'] = []
self.results['Issues'][severity]['header'] = []
res1 = {}
res1['CVEID'] = str(cve_id)
res1['Product'] = str(product)
res1['CWE'] = str(cwe_text)
res1['Severity'] = str(severity)
res['Product'] = str(product)
res['Vendor'] = str(vendor)
res['Severity'] = str(severity)
res['CVEID'] = str(cve_id)
res['Vector String'] = str(vectorString)
res['Vulnerability Name'] = str(vuln_name)
res['Patched Version'] = str(patch)
res['Recommendation'] = str(recommendation)
res['Reference'] = str(reference)
res['Publish Date'] = str(pub_date)
res['Introduced Through'] = str(dependancy)
res['Installed Version'] = str(mVer)
res['CWE'] = str(cwe_text)
if res not in self.results['Issues'][severity]['data']:
self.results['Issues'][severity]['data'].append(res)
self.results['Issues'][severity]['header'].append(res1)
if severity.lower() == "medium" or severity.lower() == "moderate":
self.med.append("Medium")
if severity.lower() == "high":
self.hig.append("High")
if severity.lower() == "low":
self.low.append("Low")
if severity.lower() == "critical":
self.cri.append("Critical")
self.vuln_found.append(product)
if product not in self.vuln_depe:
self.vuln_depe.append(product)
def getVulnData(self, product, vendor, mVersions, depend):
for row in self.responseData["results"]["%s/%s" % (vendor, product)]:
cve_id = row['cve_id']
versions = row['versions']
reference = row['reference']
vuln_name = row['vuln_name']
vectorString = row['vectorString']
baseScore = row['baseScore']
recommendation = row['recommendation']
pub_date = row['pub_date']
patch = row['patch']
severity = row['severity']
cwe_text = row['cwe_text']
self.matchVer(mVersions, product, vendor, cve_id, versions, reference, vuln_name, vectorString, baseScore, recommendation, pub_date, severity, depend, patch, cwe_text)
def getInstallPkgList(self):
self.installPackageLists = []
self.resultsPkg = {}
for file in glob2.glob('%s/**/composer.*' % (self.sourcefolder), recursive=True):
file = os.path.abspath(file)
filename = os.path.basename(file)
if 'files' not in self.resultsPkg:
self.resultsPkg['files'] = {}
if filename == "composer.lock":
if os.stat(file).st_size != 0:
with open(file) as f:
data = json.load(f)
if filename not in self.resultsPkg['files']:
self.resultsPkg['files'][filename] = {}
self.resultsPkg['files'][filename][file] = {}
if 'packages' in data:
for pkg in data['packages']:
package_name = pkg['name']
if "/" in package_name:
if package_name not in self.installPackageLists:
self.installPackageLists.append(package_name)
vendor = package_name.split("/")[0]
product = package_name.split("/")[1]
versions = pkg['version']
if package_name not in self.resultsPkg['files'][filename][file]:
self.resultsPkg['files'][filename][file][str(package_name)] = {}
self.resultsPkg['files'][filename][file][str(package_name)]["product"] = str(product)
self.resultsPkg['files'][filename][file][str(package_name)]["vendor"] = str(vendor)
self.resultsPkg['files'][filename][file][str(package_name)]["version"] = []
self.resultsPkg['files'][filename][file][str(package_name)]["depend"] = []
if versions not in self.resultsPkg['files'][filename][file][package_name]["version"]:
self.resultsPkg['files'][filename][file][package_name]["version"].append(str(versions))
if 'require' in pkg:
for d in pkg['require']:
if "/" in d:
if d not in self.installPackageLists:
self.installPackageLists.append(d)
vendor1 = d.split("/")[0]
product1 = d.split("/")[1]
versions1 = pkg['require'][d]
if d not in self.resultsPkg['files'][filename][file]:
self.resultsPkg['files'][filename][file][str(d)] = {}
self.resultsPkg['files'][filename][file][str(d)]["product"] = str(product1)
self.resultsPkg['files'][filename][file][str(d)]["vendor"] = str(vendor1)
self.resultsPkg['files'][filename][file][str(d)]["version"] = []
self.resultsPkg['files'][filename][file][str(d)]["depend"] = []
if versions1 not in self.resultsPkg['files'][filename][file][d]["version"]:
self.resultsPkg['files'][filename][file][str(d)]["version"].append(str(versions1))
if "%s@%s" % (str(package_name), str(versions)) not in self.resultsPkg['files'][filename][file][d]["depend"]:
self.resultsPkg['files'][filename][file][str(d)]["depend"].append("%s@%s" % (str(package_name), str(versions)))
if 'require-dev' in pkg:
for d in pkg['require-dev']:
if "/" in d:
if d not in self.installPackageLists:
self.installPackageLists.append(d)
vendor2 = d.split("/")[0]
product2 = d.split("/")[1]
versions2 = pkg['require-dev'][d]
if d not in self.resultsPkg['files'][filename][file]:
self.resultsPkg['files'][filename][file][str(d)] = {}
self.resultsPkg['files'][filename][file][str(d)]["product"] = str(product2)
self.resultsPkg['files'][filename][file][str(d)]["vendor"] = str(vendor2)
self.resultsPkg['files'][filename][file][str(d)]["version"] = []
self.resultsPkg['files'][filename][file][str(d)]["depend"] = []
if versions2 not in self.resultsPkg['files'][filename][file][d]["version"]:
self.resultsPkg['files'][filename][file][str(d)]["version"].append(str(versions2))
if "%s@%s" % (str(package_name), str(versions)) not in self.resultsPkg['files'][filename][file][d]["depend"]:
self.resultsPkg['files'][filename][file][str(d)]["depend"].append("%s@%s" % (str(package_name), str(versions)))
if filename == "composer.json":
if os.stat(file).st_size != 0:
with open(file) as f:
data = json.load(f)
if filename not in self.resultsPkg['files']:
self.resultsPkg['files'][filename] = {}
self.resultsPkg['files'][filename][file] = {}
if 'require' in data:
for d in data['require']:
if "/" in d:
if d not in self.installPackageLists:
self.installPackageLists.append(d)
vendor3 = d.split("/")[0]
product3 = d.split("/")[1]
versions3 = data['require'][d]
if d not in self.resultsPkg['files'][filename][file]:
self.resultsPkg['files'][filename][file][str(d)] = {}
self.resultsPkg['files'][filename][file][str(d)]["product"] = str(product3)
self.resultsPkg['files'][filename][file][str(d)]["vendor"] = str(vendor3)
self.resultsPkg['files'][filename][file][str(d)]["version"] = []
self.resultsPkg['files'][filename][file][str(d)]["depend"] = []
if str(versions3) not in self.resultsPkg['files'][filename][file][d]["version"]:
self.resultsPkg['files'][filename][file][str(d)]["version"].append(str(versions3))
if 'require-dev' in data:
for d in data['require-dev']:
if "/" in d:
if d not in self.installPackageLists:
self.installPackageLists.append(d)
vendor4 = d.split("/")[0]
product4 = d.split("/")[1]
versions4 = data['require-dev'][d]
if d not in self.resultsPkg['files'][filename][file]:
self.resultsPkg['files'][filename][file][str(d)] = {}
self.resultsPkg['files'][filename][file][str(d)]["product"] = str(product4)
self.resultsPkg['files'][filename][file][str(d)]["vendor"] = str(vendor4)
self.resultsPkg['files'][filename][file][str(d)]["version"] = []
self.resultsPkg['files'][filename][file][str(d)]["depend"] = []
if str(versions4) not in self.resultsPkg['files'][filename][file][d]["version"]:
self.resultsPkg['files'][filename][file][str(d)]["version"].append(str(versions4))
return self.resultsPkg
def getUnique(self, lists):
unique_list = []
for x in lists:
if x not in unique_list:
unique_list.append(x)
return unique_list
    def scanComposerPackage(self):
        """End-to-end scan driver: index composer files, sync advisories from
        the server, match versions, write the JSON report and upload it."""
        print "[ OK ] Preparing..., It's take time to completed."
        # Build the installed-package index (also fills installPackageLists).
        output = self.getInstallPkgList()
        print "[ OK ] Database sync started"
        # Fetch advisory data for every discovered package name.
        self.syncData(self.installPackageLists)
        print "[ OK ] Database sync comleted"
        # Per-severity counters; appended to by matchVer() during the scan.
        self.med = []
        self.hig = []
        self.low = []
        self.cri = []
        print "[ OK ] Scanning started"
        self.results['Issues'] = {}
        self.results['files'] = {}
        for filename in output['files']:
            print "[ OK ] Started %s file processing" % filename
            if filename not in self.testedWith:
                self.testedWith.append(filename)
            if filename not in self.results['files']:
                self.results['files'][filename] = {}
                self.results['files'][filename]['packages'] = []
            print "There are total %s %s files are processing" % (filename, len(output['files'][filename]))
            for file in output['files'][filename]:
                print "File %s Scanning Started" % file
                # tqdm renders a console progress bar over the package names.
                for d in tqdm(output['files'][filename][file]):
                    vendor = output['files'][filename][file][d]['vendor']
                    product = output['files'][filename][file][d]['product']
                    version = output['files'][filename][file][d]['version']
                    depend = output['files'][filename][file][d]['depend']
                    if product not in self.dependanciesCount:
                        self.dependanciesCount.append(product)
                    # version[0]: only the first recorded version/constraint is
                    # matched against advisories.
                    self.getVulnData(product, vendor, version[0], ','.join(depend))
                    res = {}
                    res['product'] = product
                    res['version'] = version
                    res['file'] = file
                    res['Dependencies'] = ','.join(depend)
                    self.results['files'][filename]['packages'].append(res)
        print "[ OK ] Scanning Completed"
        # Summary header for the report.
        self.results['header']['Tested With'] = ','.join(self.testedWith)
        self.results['header']['Severity'] = {}
        self.results['header']['Total Scanned Dependancies'] = len(self.dependanciesCount)
        # NOTE(review): vuln_found may contain duplicates, so "Total Unique
        # Vulnerabilities" is not actually deduplicated (unlike the dependency
        # count below, which goes through getUnique) -- confirm intent.
        self.results['header']['Total Unique Vulnerabilities'] = len(self.vuln_found)
        self.results['header']['Total Vulnerable Dependencies'] = len(self.getUnique(self.vuln_depe))
        self.results['header']['Severity']['Low'] = len(self.low)
        self.results['header']['Severity']['High'] = len(self.hig)
        self.results['header']['Severity']['Medium'] = len(self.med)
        self.results['header']['Severity']['Critical'] = len(self.cri)
        # Persist the report, then upload it to the server.
        with open("%s/%s.json" % (self.report_path, self.report_name), "w") as f:
            json.dump(self.results, f)
        print "[ OK ] Vulnerabilities Report ready - %s/%s" % (self.report_path, self.report_name)
        url = "%s://%s:%s/api/report-upload/language/%s" % (self.protocol, self.server, self.port, self.tokenId)
        fin = open('%s/%s.json' % (self.report_path, self.report_name), 'rb')
        files = {'file': fin}
        response = requests.post(url, files = files)
        if response.status_code == 201:
            print "[ OK ] Report Uploaded on server"
        else:
            print "[ ERROR ] Report Upload Error"
def syncData(self, productLists):
try:
url = "%s://%s:%s/api/scanDetailsVendor/composer" % (self.protocol, self.server, self.port)
headers = {
'Authorization': 'Basic QWRtaW5pc3RyYXRvcjpWZXJzYUAxMjM=',
'Content-Type': 'application/json'
}
payload = "{\"data\": \""+ ','.join(productLists) + "\"}"
response = requests.request("POST", url, headers=headers, data = payload)
responseData = response.json()
self.responseData = responseData
except:
print "[ OK ] Database sync error! Check internet connectivity"
sys.exit(1)
def query_yes_no(self, question, default="yes"):
valid = {"yes": True, "y": True, "ye": True,
"no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = raw_input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
if __name__ == "__main__":
    # CLI entry point: parse arguments, show the license prompt, then scan.
    parser = argparse.ArgumentParser()
    parser.add_argument('-r', '--reportPath', type=str, help='Enter Report Path', required=True)
    parser.add_argument('-n', '--projectname', type=str, help='Enter Project Name', required=True)
    parser.add_argument('-t', '--target', type=str, help='Enter target source folder', required=True)
    parser.add_argument('-o', '--owner', type=str, help='Enter project owner')
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s 1.0')
    results = parser.parse_args()
    # Placeholder owner when -o/--owner was not supplied (typo "Unknow" fixed).
    if not results.owner:
        owner = "Unknown"
    else:
        owner = results.owner
    data = """
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the m
Do you want to accept ?
"""
    res = getComposerVulnerabilities(results.reportPath, results.projectname, results.target, owner)
    if res.query_yes_no(data):
        res.scanComposerPackage()
    else:
        sys.exit(1)
|
[
"# Developed by : Jays Patel (cyberthreatinfo.ca)\n# This script is use to find the python Composer packages vulnerabilities from linux machine and python source project.\n\nimport time\nimport glob2\nimport random\nimport os.path\nfrom os import path\nimport ast\nimport sys\nimport commands\nimport re\nimport requests\nfrom pkg_resources import parse_version\nimport json\nimport argparse\nfrom tqdm import tqdm\nfrom datetime import datetime\n\n\nclass getComposerVulnerabilities():\n\tdef __init__(self, reportPath, project, targetFolder, owner):\n\t\tself.reportPath = reportPath\n self.sourcefolder = targetFolder\n self.project = project\n\n\n if not path.exists(\"server.config\"):\n print \"[ INFO ] server configuration json file not found in current directory\"\n sys.exit(1)\n\n\n with open('server.config') as f:\n configData = json.load(f)\n\n self.tokenId = configData['tokenId']\n self.server = configData['server']\n self.port = configData['port']\n self.protocol = configData['protocol']\n\n\t\ttry:\n url = \"%s://%s:%s/api/checkToken/%s\" % (self.protocol, self.server, self.port, self.tokenId)\n response = requests.request(\"GET\", url)\n tokenData = response.text\n tokenData = json.loads(tokenData)\n if tokenData['result']:\n print \"[ OK ] Token valid, start scanning....\"\n else:\n print \"[ INFO ] Token invalid or expire, please login on portal and verify the TokenId\"\n sys.exit(1)\n\t\texcept:\n print \"[ OK ] Server connection error, Please check internet connectivity\"\n sys.exit(1)\n\n\t\tself.results = {}\n self.results['header'] = {}\n now = datetime.now()\n self.report_name = now.strftime(\"%d-%m-%Y_%H:%M:%S\")\n self.report_path = reportPath\n\n self.results['header']['Date'] = self.report_name\n self.results['header']['Project'] = self.project\n self.results['header']['Owner'] = owner\n self.results['header']['Target'] = \"source\"\n\t\tself.results['header']['docker'] = \"False\"\n\n self.vuln_depe = []\n self.vuln_found = []\n self.testedWith = 
[]\n self.dependanciesCount = []\n\n\tdef gtEq(self, vers1, mVers):\n if parse_version(mVers) >= parse_version(vers1):\n return True\n else:\n return False\n\n def gt(self, vers1, mVers):\n if parse_version(mVers) > parse_version(vers1):\n return True\n else:\n return False\n\n def ltEq(self, vers1, mVers):\n if parse_version(mVers) <= parse_version(vers1):\n return True\n else:\n return False\n\n\n def lt(self, vers1, mVers):\n if parse_version(mVers) < parse_version(vers1):\n return True\n else:\n return False\n\n def eq(self, vers1, mVers):\n if parse_version(mVers) == parse_version(vers1):\n return True\n else:\n return False\n\n\n\tdef getLatestVersion(self, product, vendor, mVers):\n\t\tresponse = requests.get('https://repo.packagist.org/p/%s/%s.json' % (vendor, product))\n\t\tdata = response.text\n\t\tdata = json.loads(data)\n\t\tkData = []\n\t\tfor k,v in data['packages']['%s/%s' % (vendor, product)].items():\n\t\t\tif re.findall(r'^v%s' % mVers, str(k)):\n\t\t\t\tvalue = re.findall(r'%s' % mVers, str(k))[0]\n\t\t\t\tkData.append(k)\n\n\t\tmax = \"0.0\"\n\t\tfor v in kData:\n\t\t\tif parse_version(v) > parse_version(max):\n\t\t\t\tmax = v\n\n\t\treturn max\n\t\t\n\tdef getMatchVersionLists(self, product, vendor, version):\n\t\tresponse = requests.get('https://semver.mwl.be/packages/%s/%s/match?constraint=%s&minimum-stability=stable' % (vendor, product, version))\n\t\tdata = response.text\n\t\tdata = json.loads(data)\n\t\treturn data\n\n def maxValue(self, mVersions):\n ver1 = '0.0'\n for ver in mVersions:\n if parse_version(ver) > parse_version(ver1):\n ver1 = ver\n\n return ver1\n\n\tdef matchVer(self, mVersions, product, vendor, cve_id, versions, reference, vuln_name, vectorString, baseScore, recommendation, pub_date, severity, dependancy, patch, cwe_text):\n\t\tmVersions = self.getMatchVersionLists(product, vendor, mVersions)\n\t\tmVer = self.maxValue(mVersions)\n\n if severity.lower() == \"medium\" or severity.lower() == \"moderate\":\n severity = 
\"Medium\"\n elif severity.lower() == \"high\":\n severity = \"High\"\n elif severity.lower() == \"low\":\n severity = \"Low\"\n\t\telif severity.lower() == \"critical\":\n severity = \"Critical\"\n\n\t\tif not patch:\n\t\t\tpatch = versions\n\n\t\tfor vers in versions.split(\",\"):\n if re.findall(r'\\[.*:.*\\]', str(vers)):\n vers1 = re.findall(r'\\[(.*):', str(vers))[0]\n vers2 = re.findall(r':(.*)\\]', str(vers))[0]\n\n\t\t\tif self.gtEq(vers1, mVer) and self.ltEq(vers2, mVer):\n res = {}\n if severity not in self.results['Issues']:\n self.results['Issues'][severity] = {}\n\t\t\t\t\tself.results['Issues'][severity]['data'] = []\n\t\t\t\t\tself.results['Issues'][severity]['header'] = []\n\n\t\t\t\tres1 = {}\n res1['CVEID'] = str(cve_id)\n res1['Product'] = str(product)\n res1['CWE'] = str(cwe_text)\n res1['Severity'] = str(severity)\n\n res['Product'] = str(product)\n res['Vendor'] = str(vendor)\n res['Severity'] = str(severity)\n res['CVEID'] = str(cve_id)\n res['Vector String'] = str(vectorString)\n res['Vulnerability Name'] = str(vuln_name)\n res['Patched Version'] = str(patch)\n res['Recommendation'] = str(recommendation)\n res['Reference'] = str(reference)\n res['Publish Date'] = str(pub_date)\n res['Introduced Through'] = str(dependancy)\n res['Installed Version'] = str(mVer)\n\t\t\t\tres['CWE'] = str(cwe_text)\n\n if res not in self.results['Issues'][severity]['data']:\n\t\t\t\t\tself.results['Issues'][severity]['data'].append(res)\n\t\t\t\t\tself.results['Issues'][severity]['header'].append(res1)\n\n if severity.lower() == \"medium\" or severity.lower() == \"moderate\":\n self.med.append(\"Medium\")\n if severity.lower() == \"high\":\n self.hig.append(\"High\")\n if severity.lower() == \"low\":\n self.low.append(\"Low\")\n\t\t\t\t\tif severity.lower() == \"critical\":\n self.cri.append(\"Critical\")\n\n self.vuln_found.append(product)\n if product not in self.vuln_depe:\n self.vuln_depe.append(product)\n\n\t\t elif re.findall(r'\\(.*:.*\\]', 
str(vers)):\n vers1 = re.findall(r'\\((.*):', str(vers))[0]\n vers2 = re.findall(r':(.*)\\]', str(vers))[0]\n\n if self.gt(vers1, mVer) and self.ltEq(vers2, mVer):\n res = {}\n if severity not in self.results['Issues']:\n self.results['Issues'][severity] = {}\n\t\t\t\t\tself.results['Issues'][severity]['data'] = []\n\t\t\t\t\tself.results['Issues'][severity]['header'] = []\n\n\t\t\t\tres1 = {}\n res1['CVEID'] = str(cve_id)\n res1['Product'] = str(product)\n res1['CWE'] = str(cwe_text)\n res1['Severity'] = str(severity)\n\n res['Product'] = str(product)\n res['Vendor'] = str(vendor)\n res['Severity'] = str(severity)\n res['CVEID'] = str(cve_id)\n res['Vector String'] = str(vectorString)\n res['Vulnerability Name'] = str(vuln_name)\n res['Patched Version'] = str(patch)\n res['Recommendation'] = str(recommendation)\n res['Reference'] = str(reference)\n res['Publish Date'] = str(pub_date)\n res['Introduced Through'] = str(dependancy)\n res['Installed Version'] = str(mVer)\n\t\t\t\tres['CWE'] = str(cwe_text)\n\n\n if res not in self.results['Issues'][severity]['data']:\n self.results['Issues'][severity].append(res)\n\n if severity.lower() == \"medium\" or severity.lower() == \"moderate\":\n self.med.append(\"Medium\")\n if severity.lower() == \"high\":\n self.hig.append(\"High\")\n if severity.lower() == \"low\":\n self.low.append(\"Low\")\n\t\t\t\t\tif severity.lower() == \"critical\":\n self.cri.append(\"Critical\")\n\n self.vuln_found.append(product)\n if product not in self.vuln_depe:\n self.vuln_depe.append(product)\n\n\n\t\t elif re.findall(r'\\[.*:.*\\)', str(vers)):\n vers1 = re.findall(r'\\[(.*):', str(vers))[0]\n vers2 = re.findall(r':(.*)\\)', str(vers))[0]\n\n if self.gtEq(vers1, mVer) and self.lt(vers2, mVer):\n res = {}\n if severity not in self.results['Issues']:\n self.results['Issues'][severity] = {}\n\t\t\t\t\tself.results['Issues'][severity]['data'] = []\n\t\t\t\t\tself.results['Issues'][severity]['header'] = []\n\n\t\t\t\tres1 = {}\n res1['CVEID'] = 
str(cve_id)\n res1['Product'] = str(product)\n res1['CWE'] = str(cwe_text)\n res1['Severity'] = str(severity)\n\n res['Product'] = str(product)\n res['Vendor'] = str(vendor)\n res['Severity'] = str(severity)\n res['CVEID'] = str(cve_id)\n res['Vector String'] = str(vectorString)\n res['Vulnerability Name'] = str(vuln_name)\n res['Patched Version'] = str(patch)\n res['Recommendation'] = str(recommendation)\n res['Reference'] = str(reference)\n res['Publish Date'] = str(pub_date)\n res['Introduced Through'] = str(dependancy)\n res['Installed Version'] = str(mVer)\n\t\t\t\tres['CWE'] = str(cwe_text)\n\n\n if res not in self.results['Issues'][severity]['data']:\n\t\t\t\t\tself.results['Issues'][severity]['data'].append(res)\n\t\t\t\t\tself.results['Issues'][severity]['header'].append(res1)\n\n if severity.lower() == \"medium\" or severity.lower() == \"moderate\":\n self.med.append(\"Medium\")\n if severity.lower() == \"high\":\n self.hig.append(\"High\")\n if severity.lower() == \"low\":\n self.low.append(\"Low\")\n\t\t\t\t\tif severity.lower() == \"critical\":\n self.cri.append(\"Critical\")\n\n self.vuln_found.append(product)\n if product not in self.vuln_depe:\n self.vuln_depe.append(product)\n\n\t\t elif re.findall(r'\\(.*:.*\\)', str(vers)):\n vers1 = re.findall(r'\\((.*):', str(vers))[0]\n vers2 = re.findall(r':(.*)\\)', str(vers))[0]\n\n if self.gt(vers1, mVer) and self.lt(vers2, mVer):\n res = {}\n if severity not in self.results['Issues']:\n self.results['Issues'][severity] = {}\n\t\t\t\t\tself.results['Issues'][severity]['data'] = []\n\t\t\t\t\tself.results['Issues'][severity]['header'] = []\n\n\t\t\t\tres1 = {}\n res1['CVEID'] = str(cve_id)\n res1['Product'] = str(product)\n res1['CWE'] = str(cwe_text)\n res1['Severity'] = str(severity)\n\n res['Product'] = str(product)\n res['Vendor'] = str(vendor)\n res['Severity'] = str(severity)\n res['CVEID'] = str(cve_id)\n res['Vector String'] = str(vectorString)\n res['Vulnerability Name'] = str(vuln_name)\n 
res['Patched Version'] = str(patch)\n res['Recommendation'] = str(recommendation)\n res['Reference'] = str(reference)\n res['Publish Date'] = str(pub_date)\n res['Introduced Through'] = str(dependancy)\n res['Installed Version'] = str(mVer)\n\t\t\t\tres['CWE'] = str(cwe_text)\n if res not in self.results['Issues'][severity]['data']:\n\t\t\t\t\tself.results['Issues'][severity]['data'].append(res)\n\t\t\t\t\tself.results['Issues'][severity]['header'].append(res1)\n\n if severity.lower() == \"medium\" or severity.lower() == \"moderate\":\n self.med.append(\"Medium\")\n if severity.lower() == \"high\":\n self.hig.append(\"High\")\n if severity.lower() == \"low\":\n self.low.append(\"Low\")\n\t\t\t\t\tif severity.lower() == \"critical\":\n self.cri.append(\"Critical\")\n\n self.vuln_found.append(product)\n if product not in self.vuln_depe:\n self.vuln_depe.append(product)\n\n\t\t elif re.findall(r'\\(.*:.*\\)', str(vers)):\n vers1 = re.findall(r'\\((.*):', str(vers))[0]\n vers2 = re.findall(r':(.*)\\)', str(vers))[0]\n\n if self.gt(vers1, mVer) and self.lt(vers2, mVer):\n res = {}\n if severity not in self.results['Issues']:\n self.results['Issues'][severity] = {}\n\t\t\t\t\tself.results['Issues'][severity]['data'] = []\n\t\t\t\t\tself.results['Issues'][severity]['header'] = []\n\n\t\t\t\tres1 = {}\n res1['CVEID'] = str(cve_id)\n res1['Product'] = str(product)\n res1['CWE'] = str(cwe_text)\n res1['Severity'] = str(severity)\n\n res['Product'] = str(product)\n res['Vendor'] = str(vendor)\n res['Severity'] = str(severity)\n res['CVEID'] = str(cve_id)\n res['Vector String'] = str(vectorString)\n res['Vulnerability Name'] = str(vuln_name)\n res['Patched Version'] = str(patch)\n res['Recommendation'] = str(recommendation)\n res['Reference'] = str(reference)\n res['Publish Date'] = str(pub_date)\n res['Introduced Through'] = str(dependancy)\n res['Installed Version'] = str(mVer)\n\t\t\t\tres['CWE'] = str(cwe_text)\n\n\n if res not in 
self.results['Issues'][severity]['data']:\n\t\t\t\t\tself.results['Issues'][severity]['data'].append(res)\n\t\t\t\t\tself.results['Issues'][severity]['header'].append(res1)\n\n if severity.lower() == \"medium\" or severity.lower() == \"moderate\":\n self.med.append(\"Medium\")\n if severity.lower() == \"high\":\n self.hig.append(\"High\")\n if severity.lower() == \"low\":\n self.low.append(\"Low\")\n\t\t\t\t\tif severity.lower() == \"critical\":\n self.cri.append(\"Critical\")\n\n self.vuln_found.append(product)\n if product not in self.vuln_depe:\n self.vuln_depe.append(product)\n\n\t\t else:\n vers1 = str(vers)\n if self.eq(vers1, mVer):\n res = {}\n if severity not in self.results['Issues']:\n self.results['Issues'][severity] = {}\n\t\t\t\t\tself.results['Issues'][severity]['data'] = []\n\t\t\t\t\tself.results['Issues'][severity]['header'] = []\n\n\t\t\t\tres1 = {}\n res1['CVEID'] = str(cve_id)\n res1['Product'] = str(product)\n res1['CWE'] = str(cwe_text)\n res1['Severity'] = str(severity)\n\n res['Product'] = str(product)\n res['Vendor'] = str(vendor)\n res['Severity'] = str(severity)\n res['CVEID'] = str(cve_id)\n res['Vector String'] = str(vectorString)\n res['Vulnerability Name'] = str(vuln_name)\n res['Patched Version'] = str(patch)\n res['Recommendation'] = str(recommendation)\n res['Reference'] = str(reference)\n res['Publish Date'] = str(pub_date)\n res['Introduced Through'] = str(dependancy)\n res['Installed Version'] = str(mVer)\n\t\t\t\tres['CWE'] = str(cwe_text)\n\n\n if res not in self.results['Issues'][severity]['data']:\n\t\t\t\t\tself.results['Issues'][severity]['data'].append(res)\n\t\t\t\t\tself.results['Issues'][severity]['header'].append(res1)\n\n if severity.lower() == \"medium\" or severity.lower() == \"moderate\":\n self.med.append(\"Medium\")\n if severity.lower() == \"high\":\n self.hig.append(\"High\")\n if severity.lower() == \"low\":\n self.low.append(\"Low\")\n\t\t\t\t\tif severity.lower() == \"critical\":\n 
self.cri.append(\"Critical\")\n\n self.vuln_found.append(product)\n if product not in self.vuln_depe:\n self.vuln_depe.append(product)\n\n\n\n\tdef getVulnData(self, product, vendor, mVersions, depend):\n for row in self.responseData[\"results\"][\"%s/%s\" % (vendor, product)]:\n cve_id = row['cve_id']\n\t\t\tversions = row['versions']\n\t\t\treference = row['reference']\n\t\t\tvuln_name = row['vuln_name']\n\t\t\tvectorString = row['vectorString']\n\t\t\tbaseScore = row['baseScore']\n\t\t\trecommendation = row['recommendation']\n\t\t\tpub_date = row['pub_date']\n\t\t\tpatch = row['patch']\n\t\t\tseverity = row['severity']\n\t\t\tcwe_text = row['cwe_text']\n\t\t\tself.matchVer(mVersions, product, vendor, cve_id, versions, reference, vuln_name, vectorString, baseScore, recommendation, pub_date, severity, depend, patch, cwe_text)\n\n\n\tdef getInstallPkgList(self):\n\t\tself.installPackageLists = []\n\t\tself.resultsPkg = {}\n\n\t\tfor file in glob2.glob('%s/**/composer.*' % (self.sourcefolder), recursive=True):\n\t\t\tfile = os.path.abspath(file)\n\t\t\tfilename = os.path.basename(file)\n\n\t\t\tif 'files' not in self.resultsPkg:\n \tself.resultsPkg['files'] = {}\n\n\t\t\tif filename == \"composer.lock\":\n\t\t\t if os.stat(file).st_size != 0:\n\t\t\t \twith open(file) as f:\n\t\t\t\t data = json.load(f)\n\n\t\t\t\tif filename not in self.resultsPkg['files']:\n\t\t\t \t\tself.resultsPkg['files'][filename] = {}\n\n\t\t\t\tself.resultsPkg['files'][filename][file] = {}\n\n\t\t\t\tif 'packages' in data:\n\t\t\t for pkg in data['packages']:\n\t\t\t\t package_name = pkg['name']\n\n\t\t \t\t if \"/\" in package_name:\n\t\t\t\t\t if package_name not in self.installPackageLists:\n\t\t\t\t\t\tself.installPackageLists.append(package_name)\n\n\t\t\t\t\t vendor = package_name.split(\"/\")[0]\n\t\t\t\t\t product = package_name.split(\"/\")[1]\n\t\t\t\t\t versions = pkg['version']\n\n\t\t\t\t\t if package_name not in 
self.resultsPkg['files'][filename][file]:\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(package_name)] = {}\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(package_name)][\"product\"] = str(product)\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(package_name)][\"vendor\"] = str(vendor)\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(package_name)][\"version\"] = []\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(package_name)][\"depend\"] = []\n\n\t\t\t\t\t if versions not in self.resultsPkg['files'][filename][file][package_name][\"version\"]:\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][package_name][\"version\"].append(str(versions))\n\n\t\t\t\t\t if 'require' in pkg:\n\t\t\t\t\t for d in pkg['require']:\n\t\t\t\t\t\t if \"/\" in d:\n\t\t\t\t\t\t\tif d not in self.installPackageLists:\n\t\t\t\t\t\t\t\tself.installPackageLists.append(d)\n\n\t\t\t\t\t\t\tvendor1 = d.split(\"/\")[0]\n\t\t\t\t\t\t\tproduct1 = d.split(\"/\")[1]\n\t\t\t\t\t\t\tversions1 = pkg['require'][d]\n\n\t\t\t\t\t\t\tif d not in self.resultsPkg['files'][filename][file]:\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)] = {}\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"product\"] = str(product1)\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"vendor\"] = str(vendor1)\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"] = []\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"depend\"] = []\n\n\t\t\t\t\t\t\tif versions1 not in self.resultsPkg['files'][filename][file][d][\"version\"]:\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"].append(str(versions1))\n\n\t\t\t\t\t\t\tif \"%s@%s\" % (str(package_name), str(versions)) not in self.resultsPkg['files'][filename][file][d][\"depend\"]:\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"depend\"].append(\"%s@%s\" % (str(package_name), 
str(versions)))\n\n\t\t\t\t\t if 'require-dev' in pkg:\n\t\t\t\t\t for d in pkg['require-dev']:\n\t\t\t\t\t\t if \"/\" in d:\n\t\t\t\t\t\t\tif d not in self.installPackageLists:\n\t\t\t\t\t\t\t\tself.installPackageLists.append(d)\n\n\t\t\t\t\t\t\tvendor2 = d.split(\"/\")[0]\n\t\t\t\t\t\t\tproduct2 = d.split(\"/\")[1]\n\t\t\t\t\t\t\tversions2 = pkg['require-dev'][d]\n\n\t\t\t\t\t\t\tif d not in self.resultsPkg['files'][filename][file]:\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)] = {}\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"product\"] = str(product2)\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"vendor\"] = str(vendor2)\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"] = []\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"depend\"] = []\n\n\t\t\t\t\t\t\tif versions2 not in self.resultsPkg['files'][filename][file][d][\"version\"]:\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"].append(str(versions2))\n\n\t\t\t\t\t\t\tif \"%s@%s\" % (str(package_name), str(versions)) not in self.resultsPkg['files'][filename][file][d][\"depend\"]:\n\t\t\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"depend\"].append(\"%s@%s\" % (str(package_name), str(versions)))\n\n\n\n\t\t\tif filename == \"composer.json\":\n\t\t\t if os.stat(file).st_size != 0:\n\t\t\t with open(file) as f:\n\t\t\t\t data = json.load(f)\n\n\t\t\t\tif filename not in self.resultsPkg['files']:\n\t\t\t \tself.resultsPkg['files'][filename] = {}\n\n\t\t\t\tself.resultsPkg['files'][filename][file] = {}\n\n\n\t\t\t if 'require' in data:\n\t\t\t \t for d in data['require']:\n\t\t \t\t if \"/\" in d:\n\t\t\t\t\t if d not in self.installPackageLists:\n\t\t\t\t\t\tself.installPackageLists.append(d)\n\n\t\t\t\t\t vendor3 = d.split(\"/\")[0]\n\t\t\t\t\t product3 = d.split(\"/\")[1]\n\t\t\t\t\t versions3 = data['require'][d]\n\t\t\t\t\t\n\t\t\t\t\t if d not in 
self.resultsPkg['files'][filename][file]:\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)] = {}\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"product\"] = str(product3)\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"vendor\"] = str(vendor3)\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"] = []\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"depend\"] = []\n\n\t\t\t\t\t if str(versions3) not in self.resultsPkg['files'][filename][file][d][\"version\"]:\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"].append(str(versions3))\n\n\n\t\t\t if 'require-dev' in data:\n\t\t\t \t for d in data['require-dev']:\n\t\t \t\t if \"/\" in d:\n\t\t\t\t\t if d not in self.installPackageLists:\n\t\t\t\t\t\tself.installPackageLists.append(d)\n\n\t\t\t\t\t vendor4 = d.split(\"/\")[0]\n\t\t\t\t\t product4 = d.split(\"/\")[1]\n\t\t\t\t\t versions4 = data['require-dev'][d]\n\t\t\t\t\t\n\t\t\t\t\t if d not in self.resultsPkg['files'][filename][file]:\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)] = {}\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"product\"] = str(product4)\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"vendor\"] = str(vendor4)\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"] = []\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"depend\"] = []\n\t\t\t\n\t\t\t\t\t if str(versions4) not in self.resultsPkg['files'][filename][file][d][\"version\"]:\n\t\t\t\t\t\tself.resultsPkg['files'][filename][file][str(d)][\"version\"].append(str(versions4))\n\n\n\t\treturn self.resultsPkg\n\t\t\n\t\t\t\n\n\tdef getUnique(self, lists):\n\t\tunique_list = [] \n\t\tfor x in lists:\n\t\t\tif x not in unique_list:\n\t\t\t\tunique_list.append(x)\n\t\treturn unique_list\n\n\tdef scanComposerPackage(self):\n\t\tprint \"[ OK ] Preparing..., It's take time to completed.\"\n\t\toutput = 
self.getInstallPkgList()\n\t\tprint \"[ OK ] Database sync started\"\n\t\tself.syncData(self.installPackageLists)\n\t\tprint \"[ OK ] Database sync comleted\"\n\t\tself.med = []\n self.hig = []\n self.low = []\n self.cri = []\n\t\tprint \"[ OK ] Scanning started\"\n\n\t\tself.results['Issues'] = {}\n\t\tself.results['files'] = {}\n\n\t\tfor filename in output['files']:\n\t\t\tprint \"[ OK ] Started %s file processing\" % filename\n\t\t\tif filename not in self.testedWith:\n\t\t\t\tself.testedWith.append(filename)\n\t\t\tif filename not in self.results['files']:\n\t\t\t\tself.results['files'][filename] = {}\n\t\t\t\tself.results['files'][filename]['packages'] = []\n\t\t\tprint \"There are total %s %s files are processing\" % (filename, len(output['files'][filename]))\n\t\t\tfor file in output['files'][filename]:\n\t\t\t\tprint \"File %s Scanning Started\" % file\n\t\t\t\tfor d in tqdm(output['files'][filename][file]):\n\t\t\t\t\tvendor = output['files'][filename][file][d]['vendor']\n\t\t\t\t\tproduct = output['files'][filename][file][d]['product']\n\t\t\t\t\tversion = output['files'][filename][file][d]['version']\n\t\t\t\t\tdepend = output['files'][filename][file][d]['depend']\n\t\t\t\t\tif product not in self.dependanciesCount:\n\t\t\t\t\t\tself.dependanciesCount.append(product)\n\t\t\t\t\tself.getVulnData(product, vendor, version[0], ','.join(depend))\n\n\t\t\t\t\tres = {}\n res['product'] = product\n res['version'] = version\n res['file'] = file\n\t\t\t\t\tres['Dependencies'] = ','.join(depend)\n self.results['files'][filename]['packages'].append(res)\n\t\n\n\t\tprint \"[ OK ] Scanning Completed\"\n\n\t\tself.results['header']['Tested With'] = ','.join(self.testedWith)\n self.results['header']['Severity'] = {}\n self.results['header']['Total Scanned Dependancies'] = len(self.dependanciesCount)\n self.results['header']['Total Unique Vulnerabilities'] = len(self.vuln_found)\n self.results['header']['Total Vulnerable Dependencies'] = 
len(self.getUnique(self.vuln_depe))\n self.results['header']['Severity']['Low'] = len(self.low)\n self.results['header']['Severity']['High'] = len(self.hig)\n self.results['header']['Severity']['Medium'] = len(self.med)\n self.results['header']['Severity']['Critical'] = len(self.cri)\n\n\t\twith open(\"%s/%s.json\" % (self.report_path, self.report_name), \"w\") as f:\n\t\t\tjson.dump(self.results, f)\n\t\t\n\t\tprint \"[ OK ] Vulnerabilities Report ready - %s/%s\" % (self.report_path, self.report_name)\n\n\t\turl = \"%s://%s:%s/api/report-upload/language/%s\" % (self.protocol, self.server, self.port, self.tokenId)\n fin = open('%s/%s.json' % (self.report_path, self.report_name), 'rb')\n files = {'file': fin}\n response = requests.post(url, files = files)\n\n if response.status_code == 201:\n print \"[ OK ] Report Uploaded on server\"\n else:\n print \"[ ERROR ] Report Upload Error\"\n\n\n\tdef syncData(self, productLists):\n\t try:\n url = \"%s://%s:%s/api/scanDetailsVendor/composer\" % (self.protocol, self.server, self.port)\n headers = {\n 'Authorization': 'Basic QWRtaW5pc3RyYXRvcjpWZXJzYUAxMjM=',\n 'Content-Type': 'application/json'\n }\n payload = \"{\\\"data\\\": \\\"\"+ ','.join(productLists) + \"\\\"}\"\n\n response = requests.request(\"POST\", url, headers=headers, data = payload)\n responseData = response.json()\n self.responseData = responseData\n except:\n print \"[ OK ] Database sync error! 
Check internet connectivity\"\n sys.exit(1)\n\n\n\tdef query_yes_no(self, question, default=\"yes\"):\n valid = {\"yes\": True, \"y\": True, \"ye\": True,\n \"no\": False, \"n\": False}\n if default is None:\n prompt = \" [y/n] \"\n elif default == \"yes\":\n prompt = \" [Y/n] \"\n elif default == \"no\":\n prompt = \" [y/N] \"\n else:\n raise ValueError(\"invalid default answer: '%s'\" % default)\n\n while True:\n sys.stdout.write(question + prompt)\n choice = raw_input().lower()\n if default is not None and choice == '':\n return valid[default]\n elif choice in valid:\n return valid[choice]\n else:\n sys.stdout.write(\"Please respond with 'yes' or 'no' \"\n \"(or 'y' or 'n').\\n\")\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n\n parser.add_argument('-r', '--reportPath', type=str, help='Enter Report Path', required=True)\n parser.add_argument('-n', '--projectname', type=str, help='Enter Project Name', required=True)\n parser.add_argument('-t', '--target', type=str, help='Enter target source folder', required=True)\n parser.add_argument('-o', '--owner', type=str, help='Enter project owner')\n\n parser.add_argument('-v', '--version', action='version',\n version='%(prog)s 1.0')\n\n results = parser.parse_args()\n\n if not results.owner:\n owner = \"Unknow\"\n else:\n owner = results.owner\n\n data = \"\"\"\n GNU GENERAL PUBLIC LICENSE\n Version 3, 29 June 2007\n\n Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n Preamble\n\n The GNU General Public License is a free, copyleft license for\nsoftware and other kinds of works.\n\n The licenses for most software and other practical works are designed\nto take away your freedom to share and change the works. 
By contrast,\nthe GNU General Public License is intended to guarantee your freedom to\nshare and change all versions of a program--to make sure it remains free\nsoftware for all its users. We, the Free Software Foundation, use the\nGNU General Public License for most of our software; it applies also to\nany other work released this way by its authors. You can apply it to\nyour programs, too.\n\n When we speak of free software, we are referring to freedom, not\nprice. Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthem if you wish), that you receive source code or can get it if you\nwant it, that you can change the software or use pieces of it in new\nfree programs, and that you know you can do these things.\n\n To protect your rights, we need to prevent others from denying you\nthese rights or asking you to surrender the rights. Therefore, you have\ncertain responsibilities if you distribute copies of the software, or if\nyou modify it: responsibilities to respect the freedom of others.\n\n For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must pass on to the recipients the same\nfreedoms that you received. You must make sure that they, too, receive\nor can get the source code. And you must show them these terms so they\nknow their rights.\n\n Developers that use the GNU GPL protect your rights with two steps:\n(1) assert copyright on the software, and (2) offer you this License\ngiving you legal permission to copy, distribute and/or modify it.\n\n For the developers' and authors' protection, the GPL clearly explains\nthat there is no warranty for this free software. 
For both users' and\nauthors' sake, the GPL requires that modified versions be marked as\nchanged, so that their problems will not be attributed erroneously to\nauthors of previous versions.\n\n Some devices are designed to deny users access to install or run\nmodified versions of the software inside them, although the m\n\nDo you want to accept ?\n \"\"\"\n res = getComposerVulnerabilities(results.reportPath, results.projectname, results.target, owner)\n\n if res.query_yes_no(data):\n res.scanComposerPackage()\n else:\n sys.exit(1)\n\n"
] | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.