Dataset columns (name, type, and value statistics):

  project          string (length 1–98)
  commit_sha       string (length 40, fixed)
  parent_sha       string (length 40, fixed)
  file_path        string (length 4–209)
  project_url      string (length 23–132)
  likely_bug       bool (1 class)
  comodified       bool (1 class)
  in_function      bool (2 classes)
  diff             string (length 27–9.71k)
  before           string (length 1–8.91k)
  after            string (length 1–6k)
  sstub_pattern    string (23 classes)
  edit_script      string (length 33–158k)
  key              string (length 45–154)
  commit_message   string (length 3–65.5k)
  files            list
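The records that follow list their values in this column order: project, commit_sha, parent_sha, file_path, project_url, likely_bug, comodified, in_function, diff, before, after, sstub_pattern, edit_script, key, commit_message, files. As a minimal, illustrative sketch (not part of the dataset), the snippet below assumes the rows have been exported as JSON Lines with exactly these keys; the file name "bugfixes.jsonl" and both helper functions are made up for the example.

# Minimal sketch, assuming a JSON Lines export of the rows shown below,
# one JSON object per line, keyed by the column names listed above.
# The file name "bugfixes.jsonl" and the helpers are illustrative.
import json
from collections import Counter

def iter_records(path):
    """Yield one dict per dataset row from a JSON Lines export."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            line = line.strip()
            if line:
                yield json.loads(line)

def pattern_histogram(path):
    """Count how often each sstub_pattern occurs among likely in-function bug fixes."""
    counts = Counter()
    for rec in iter_records(path):
        if rec.get("likely_bug") and rec.get("in_function"):
            counts[rec["sstub_pattern"]] += 1
    return counts

if __name__ == "__main__":
    for pattern, n in pattern_histogram("bugfixes.jsonl").most_common():
        print(f"{pattern:30} {n}")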
udi-wirelesstag-poly
18375b72c17ec41943cfa92434a3bcd530a68afc
f909a8cea04dca9f4902afc94d0f31bed9ee51fd
wt_nodes/wTagManager.py
https://github.com/jimboca/udi-wirelesstag-poly
true
false
true
@@ -90,7 +90,7 @@ class wTagManager(polyinterface.Node):
 mgd = self.controller.wtServer.GetTagList(self.mac)
 if mgd['st']:
- self.set_st(False)
+ self.set_st(True)
 for tag in mgd['result']:
 tag_o = self.get_tag_by_id(tag['slaveId'])
 if tag_o is None:
self . set_st ( False )
self . set_st ( True )
CHANGE_BOOLEAN_LITERAL
[["Insert", ["argument_list", 2, 24, 2, 31], ["true:True", "T"], 1], ["Delete", ["false:False", 2, 25, 2, 30]]]
jimboca/udi-wirelesstag-poly@18375b72c17ec41943cfa92434a3bcd530a68afc
st fix
[ { "sha": "2064e41b6cd3233c53879e48edd15af64d3723d2", "filename": "wt_nodes/wTagManager.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/jimboca/udi-wirelesstag-poly/blob/18375b72c17ec41943cfa92434a3bcd530a68afc/wt_nodes%2FwTagManager.py", "raw_url": "https://github.com/jimboca/udi-wirelesstag-poly/raw/18375b72c17ec41943cfa92434a3bcd530a68afc/wt_nodes%2FwTagManager.py", "contents_url": "https://api.github.com/repos/jimboca/udi-wirelesstag-poly/contents/wt_nodes%2FwTagManager.py?ref=18375b72c17ec41943cfa92434a3bcd530a68afc", "patch": "@@ -90,7 +90,7 @@ def query(self):\n \"\"\"\n mgd = self.controller.wtServer.GetTagList(self.mac)\n if mgd['st']:\n- self.set_st(False)\n+ self.set_st(True)\n for tag in mgd['result']:\n tag_o = self.get_tag_by_id(tag['slaveId'])\n if tag_o is None:" } ]
udi-wirelesstag-poly
525a9c9438b161c37e2f14abbecedd5fe4bb7e12
771fd3123b5dea63c54c85fe08eabef84088c7c2
wtServer.py
https://github.com/jimboca/udi-wirelesstag-poly
true
false
true
@@ -269,7 +269,7 @@ class wtSession():
 return self.tmgr_mac_st
 def post(self,path,payload,use_token=True):
- url = "https://wirelesstag.net/{}".format(path)
+ url = "https://www.mytaglist.com/{}".format(path)
 self.l_debug('post',"Sending: url={0} payload={1}".format(url,payload))
 if use_token:
 access_token = self.wtServer.get_access_token()
url = "https://wirelesstag.net/{}" . format ( path )
url = "https://www.mytaglist.com/{}" . format ( path )
CHANGE_STRING_LITERAL
[["Update", ["string:\"https://wirelesstag.net/{}\"", 3, 15, 3, 43], "\"https://www.mytaglist.com/{}\""]]
jimboca/udi-wirelesstag-poly@525a9c9438b161c37e2f14abbecedd5fe4bb7e12
Fix host
[ { "sha": "f404b2e1e7a0bea69f24b9c22a75b6412f25da47", "filename": "wtServer.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/jimboca/udi-wirelesstag-poly/blob/525a9c9438b161c37e2f14abbecedd5fe4bb7e12/wtServer.py", "raw_url": "https://github.com/jimboca/udi-wirelesstag-poly/raw/525a9c9438b161c37e2f14abbecedd5fe4bb7e12/wtServer.py", "contents_url": "https://api.github.com/repos/jimboca/udi-wirelesstag-poly/contents/wtServer.py?ref=525a9c9438b161c37e2f14abbecedd5fe4bb7e12", "patch": "@@ -269,7 +269,7 @@ def select_tag_manager(self,force=False):\n return self.tmgr_mac_st\n \n def post(self,path,payload,use_token=True):\n- url = \"https://wirelesstag.net/{}\".format(path)\n+ url = \"https://www.mytaglist.com/{}\".format(path)\n self.l_debug('post',\"Sending: url={0} payload={1}\".format(url,payload))\n if use_token:\n access_token = self.wtServer.get_access_token()" } ]
udi-wirelesstag-poly
7367bdeb799a4e82221fd978e396f055884d796e
18974c3ab9ffa3e5a4ad14058bc3ef879a5a25ca
wt_nodes/wTag.py
https://github.com/jimboca/udi-wirelesstag-poly
true
false
true
@@ -435,7 +435,7 @@ class wTag(polyinterface.Node):
 def get_lit(self):
 self.l_debug('get_lit','')
- self.getDriver('GV7')
+ return self.getDriver('GV7')
 def set_fan(self,value):
 self.l_debug('set_fan','{0}'.format(value))
self . getDriver ( 'GV7' )
return self . getDriver ( 'GV7' )
SINGLE_STMT
[["Insert", ["block", 2, 9, 3, 30], ["return_statement", "N0"], 1], ["Insert", "N0", ["return:return", "T"], 0], ["Move", "N0", ["call", 3, 9, 3, 30], 1], ["Delete", ["expression_statement", 3, 9, 3, 30]]]
jimboca/udi-wirelesstag-poly@7367bdeb799a4e82221fd978e396f055884d796e
2.0.20 Fix crash caused by Set Light Command
[ { "sha": "d5d08a724831ea6dc33c6bc76a8c60b1ada23678", "filename": "wt_nodes/wTag.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/jimboca/udi-wirelesstag-poly/blob/7367bdeb799a4e82221fd978e396f055884d796e/wt_nodes%2FwTag.py", "raw_url": "https://github.com/jimboca/udi-wirelesstag-poly/raw/7367bdeb799a4e82221fd978e396f055884d796e/wt_nodes%2FwTag.py", "contents_url": "https://api.github.com/repos/jimboca/udi-wirelesstag-poly/contents/wt_nodes%2FwTag.py?ref=7367bdeb799a4e82221fd978e396f055884d796e", "patch": "@@ -435,7 +435,7 @@ def set_lit(self,value):\n \n def get_lit(self):\n self.l_debug('get_lit','')\n- self.getDriver('GV7')\n+ return self.getDriver('GV7')\n \n def set_fan(self,value):\n self.l_debug('set_fan','{0}'.format(value))" } ]
udi-wirelesstag-poly
11d63839d657e6a16ceab4aaa830f09382f0c3ec
347c7b38413e5a3c98d78ec5b0cbf39a636bb9e2
wt_nodes/wTag.py
https://github.com/jimboca/udi-wirelesstag-poly
true
false
true
@@ -457,7 +457,7 @@ class wTag(polyinterface.Node):
 self.setDriver('BATLVL', myfloat(value,2))
 def set_batv(self,value):
- self.l_debug('set_batp','{0}'.format(myfloat(value,3)))
+ self.l_debug('set_batv','{0}'.format(myfloat(value,3)))
 self.setDriver('CV', myfloat(value,3))
 def set_batl(self,value,force=False):
self . l_debug ( 'set_batp' , '{0}' . format ( myfloat ( value , 3 ) ) )
self . l_debug ( 'set_batv' , '{0}' . format ( myfloat ( value , 3 ) ) )
CHANGE_STRING_LITERAL
[["Update", ["string:'set_batp'", 3, 22, 3, 32], "'set_batv'"]]
jimboca/udi-wirelesstag-poly@11d63839d657e6a16ceab4aaa830f09382f0c3ec
more debug
[ { "sha": "d829e7089a048fcc5fdae82148066ba54ad0a324", "filename": "wt_nodes/wTag.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/jimboca/udi-wirelesstag-poly/blob/11d63839d657e6a16ceab4aaa830f09382f0c3ec/wt_nodes%2FwTag.py", "raw_url": "https://github.com/jimboca/udi-wirelesstag-poly/raw/11d63839d657e6a16ceab4aaa830f09382f0c3ec/wt_nodes%2FwTag.py", "contents_url": "https://api.github.com/repos/jimboca/udi-wirelesstag-poly/contents/wt_nodes%2FwTag.py?ref=11d63839d657e6a16ceab4aaa830f09382f0c3ec", "patch": "@@ -457,7 +457,7 @@ def set_batp(self,value,force=False):\n self.setDriver('BATLVL', myfloat(value,2))\n \n def set_batv(self,value):\n- self.l_debug('set_batp','{0}'.format(myfloat(value,3)))\n+ self.l_debug('set_batv','{0}'.format(myfloat(value,3)))\n self.setDriver('CV', myfloat(value,3))\n \n def set_batl(self,value,force=False):" } ]
matplotlib
fd0807266fbf39bdba8be9280b6060cca5cca551
55bc1aadf389e010e3abc757ebece2730526410d
examples/simple_plot.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -2,7 +2,7 @@ from matplotlib.matlab import *
 t = arange(0.0, 2.0, 0.01)
 s = sin(2*pi*t)
-plot(t, s)
+plot(t, s, 'o')
 xlabel('time (s)')
 ylabel('voltage (mV)')
 title('About as simple as it gets, folks')
plot ( t , s )
plot ( t , s , 'o' )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 5, 3, 11], [",:,", "T"], 4], ["Insert", ["argument_list", 3, 5, 3, 11], ["string:'o'", "T"], 5]]
dsquareindia/matplotlib@fd0807266fbf39bdba8be9280b6060cca5cca551
null
null
matplotlib
67a7ee84d7d58c50efdb6793358628f94bc2b99b
4d83fa758dafb3b369258769dd06de588df5e730
examples/pylab_examples/figlegend_demo.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -12,7 +12,7 @@ l1, l2 = ax1.plot(x, y1, 'rs-', x, y2, 'go')
 y3 = np.sin(4*np.pi*x)
 y4 = np.exp(-2*x)
-l3, l4 = ax2.plot(x, y3, 'yd-', x, y3, 'k^')
+l3, l4 = ax2.plot(x, y3, 'yd-', x, y4, 'k^')
 fig.legend((l1, l2), ('Line 1', 'Line 2'), 'upper left')
 fig.legend((l3, l4), ('Line 3', 'Line 4'), 'upper right')
l3 , l4 = ax2 . plot ( x , y3 , 'yd-' , x , y3 , 'k^' )
l3 , l4 = ax2 . plot ( x , y3 , 'yd-' , x , y4 , 'k^' )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:y3", 3, 36, 3, 38], "y4"]]
dsquareindia/matplotlib@67a7ee84d7d58c50efdb6793358628f94bc2b99b
null
null
matplotlib
c0995e51e9c71a9b912e736075481319796694bd
b248596e65ca30250ed73e31c535fe3b71126135
doc/pyplots/annotate_transform.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -11,7 +11,7 @@ ax.set_xlim(0, 10)
 ax.set_ylim(-1, 1)
 xdata, ydata = 5, 0
-xdisplay, ydisplay = ax.transData.transform((xdata, ydata))
+xdisplay, ydisplay = ax.transData.transform_point((xdata, ydata))
 bbox = dict(boxstyle="round", fc="0.8")
 arrowprops = dict(
xdisplay , ydisplay = ax . transData . transform ( ( xdata , ydata ) )
xdisplay , ydisplay = ax . transData . transform_point ( ( xdata , ydata ) )
WRONG_FUNCTION_NAME
[["Update", ["identifier:transform", 3, 35, 3, 44], "transform_point"]]
dsquareindia/matplotlib@c0995e51e9c71a9b912e736075481319796694bd
null
null
matplotlib
1a7e2fb053b0febf2b909dc04129794488e7b0a9
0367bbfb778bc7df31dcf82554ab87947d71206f
lib/mpl_toolkits/mplot3d/axes3d.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -2565,7 +2565,7 @@ class Axes3D(Axes):
 # If any row of UVW is all zeros, don't make a quiver for it
 mask = norm > 1e-10
 XYZ = XYZ[mask]
- UVW = UVW[mask] / norm[mask, np.newaxis]
+ UVW = UVW[mask] / norm[mask].reshape((-1, 1))
 if len(XYZ) > 0:
 # compute the shaft lines all at once with an outer product
UVW = UVW [ mask ] / norm [ mask , np . newaxis ]
UVW = UVW [ mask ] / norm [ mask ] . reshape ( ( - 1 , 1 ) )
CHANGE_BINARY_OPERAND
[["Insert", ["binary_operator", 3, 15, 3, 49], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["subscript", "N3"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:reshape", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["tuple", "N4"], 1], ["Insert", "N2", ["):)", "T"], 2], ["Move", "N3", ["identifier:norm", 3, 27, 3, 31], 0], ["Move", "N3", ["[:[", 3, 31, 3, 32], 1], ["Move", "N3", ["identifier:mask", 3, 32, 3, 36], 2], ["Insert", "N3", ["]:]", "T"], 3], ["Insert", "N4", ["(:(", "T"], 0], ["Insert", "N4", ["unary_operator", "N5"], 1], ["Insert", "N4", [",:,", "T"], 2], ["Insert", "N4", ["integer:1", "T"], 3], ["Insert", "N4", ["):)", "T"], 4], ["Insert", "N5", ["-:-", "T"], 0], ["Insert", "N5", ["integer:1", "T"], 1], ["Delete", [",:,", 3, 36, 3, 37]], ["Delete", ["identifier:np", 3, 38, 3, 40]], ["Delete", [".:.", 3, 40, 3, 41]], ["Delete", ["identifier:newaxis", 3, 41, 3, 48]], ["Delete", ["attribute", 3, 38, 3, 48]], ["Delete", ["]:]", 3, 48, 3, 49]], ["Delete", ["subscript", 3, 27, 3, 49]]]
dsquareindia/matplotlib@1a7e2fb053b0febf2b909dc04129794488e7b0a9
null
null
matplotlib
7ef79196fb16e28a3cc91b63b8be832e4f22e9bb
313e946511b822f040579dc6faaa96910c131328
lib/matplotlib/__init__.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -107,7 +107,7 @@ import sys
 import distutils.version
 from itertools import chain
-__version__ = str('1.4.x')
+__version__ = str('1.4.3rc1')
 __version__numpy__ = str('1.6') # minimum required numpy version
 try:
__version__ = str ( '1.4.x' )
__version__ = str ( '1.4.3rc1' )
CHANGE_STRING_LITERAL
[["Update", ["string:'1.4.x'", 3, 19, 3, 26], "'1.4.3rc1'"]]
dsquareindia/matplotlib@7ef79196fb16e28a3cc91b63b8be832e4f22e9bb
null
null
ssw555project
715f90891a06dcac0672a5668a29a893beaf7f91
c48bf12b559dd1aa936b4d8e91bcbf8d58fe904e
ged.py
https://github.com/maxmelo/ssw555project
true
false
false
@@ -394,6 +394,6 @@ if __name__ == "__main__":
 print('Lists:')
 for l in lists:
 print(' ' + l[0])
- if len(l[1]) > 0:
+ if len(l) == 2 and len(l[1]) > 0:
 for item in l[1]:
 print(' ' + str(item))
\ No newline at end of file
if len ( l [ 1 ] ) > 0 : for item in l [ 1 ] : print ( ' ' + str ( item ) ) No newline at end of file
if len ( l ) == 2 and len ( l [ 1 ] ) > 0 : for item in l [ 1 ] : print ( ' ' + str ( item ) ) No newline at end of file
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 17, 6, 27], ["boolean_operator", "N0"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Move", "N0", ["comparison_operator", 3, 20, 3, 33], 2], ["Insert", "N1", ["call", "N2"], 0], ["Insert", "N1", ["==:==", "T"], 1], ["Insert", "N1", ["integer:2", "T"], 2], ["Insert", "N2", ["identifier:len", "T"], 0], ["Insert", "N2", ["argument_list", "N3"], 1], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["identifier:l", "T"], 1], ["Insert", "N3", ["):)", "T"], 2]]
maxmelo/ssw555project@715f90891a06dcac0672a5668a29a893beaf7f91
fixed case where list story errored with no items in list
[ { "sha": "f0a568ae24aa9f6f0fe711e95aa6621ef7fa8493", "filename": "ged.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/maxmelo/ssw555project/blob/715f90891a06dcac0672a5668a29a893beaf7f91/ged.py", "raw_url": "https://github.com/maxmelo/ssw555project/raw/715f90891a06dcac0672a5668a29a893beaf7f91/ged.py", "contents_url": "https://api.github.com/repos/maxmelo/ssw555project/contents/ged.py?ref=715f90891a06dcac0672a5668a29a893beaf7f91", "patch": "@@ -394,6 +394,6 @@ def getopts(argv):\n print('Lists:')\r\n for l in lists:\r\n print(' ' + l[0])\r\n- if len(l[1]) > 0:\r\n+ if len(l) == 2 and len(l[1]) > 0:\r\n for item in l[1]:\r\n print(' ' + str(item))\n\\ No newline at end of file" } ]
keras-retinanet
e48f77ac5d8523919a3145fa6bcfe177e7ff9896
0c450f10da355207440bb9256087be1d7a485e1a
keras_retinanet/layers/_focal_loss.py
https://github.com/tickleliu/keras-retinanet
true
false
true
@@ -5,7 +5,7 @@ import keras_retinanet.backend
 class FocalLoss(keras.layers.Layer):
- def __init__(self, num_classes=21, alpha=0.25, gamma=2.0, *args, **kwargs):
+ def __init__(self, num_classes=21, alpha=0.75, gamma=2.0, *args, **kwargs):
 self.num_classes = num_classes
 self.alpha = alpha
 self.gamma = gamma
def __init__ ( self , num_classes = 21 , alpha = 0.25 , gamma = 2.0 , * args , ** kwargs ) : self . num_classes = num_classes self . alpha = alpha self . gamma = gamma
def __init__ ( self , num_classes = 21 , alpha = 0.75 , gamma = 2.0 , * args , ** kwargs ) : self . num_classes = num_classes self . alpha = alpha self . gamma = gamma
CHANGE_NUMERIC_LITERAL
[["Update", ["float:0.25", 1, 43, 1, 47], "0.75"]]
tickleliu/keras-retinanet@e48f77ac5d8523919a3145fa6bcfe177e7ff9896
Set alpha to 0.75 (fault in paper?).
[ { "sha": "69009fe92d06a38dee8f23b06a5316ac565be7de", "filename": "keras_retinanet/layers/_focal_loss.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/tickleliu/keras-retinanet/blob/e48f77ac5d8523919a3145fa6bcfe177e7ff9896/keras_retinanet%2Flayers%2F_focal_loss.py", "raw_url": "https://github.com/tickleliu/keras-retinanet/raw/e48f77ac5d8523919a3145fa6bcfe177e7ff9896/keras_retinanet%2Flayers%2F_focal_loss.py", "contents_url": "https://api.github.com/repos/tickleliu/keras-retinanet/contents/keras_retinanet%2Flayers%2F_focal_loss.py?ref=e48f77ac5d8523919a3145fa6bcfe177e7ff9896", "patch": "@@ -5,7 +5,7 @@\n As described in https://arxiv.org/abs/1708.02002\n \"\"\"\n class FocalLoss(keras.layers.Layer):\n-\tdef __init__(self, num_classes=21, alpha=0.25, gamma=2.0, *args, **kwargs):\n+\tdef __init__(self, num_classes=21, alpha=0.75, gamma=2.0, *args, **kwargs):\n \t\tself.num_classes = num_classes\n \t\tself.alpha = alpha\n \t\tself.gamma = gamma" } ]
incubator-superset
aa95e03eb9305c81118876f05f351709daf4cb98
cf1d0f38ad094d7809b500e2eeae39560111e626
superset/connectors/druid/models.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -492,7 +492,7 @@ class DruidDatasource(Model, BaseDatasource):
 lbound = datetime(1901, 1, 1).isoformat()[:10]
 rbound = datetime(2050, 1, 1).isoformat()[:10]
 if not self.version_higher(self.cluster.druid_version, '0.8.2'):
- rbound = datetime.now().isoformat()[:10]
+ rbound = datetime.now().isoformat()
 try:
 segment_metadata = client.segment_metadata(
 datasource=self.datasource_name,
rbound = datetime . now ( ) . isoformat ( ) [ : 10 ]
rbound = datetime . now ( ) . isoformat ( )
SINGLE_STMT
[["Move", ["assignment", 3, 17, 3, 57], ["call", 3, 26, 3, 52], 2], ["Delete", ["[:[", 3, 52, 3, 53]], ["Delete", [":::", 3, 53, 3, 54]], ["Delete", ["integer:10", 3, 54, 3, 56]], ["Delete", ["slice", 3, 53, 3, 56]], ["Delete", ["]:]", 3, 56, 3, 57]], ["Delete", ["subscript", 3, 26, 3, 57]]]
xunyuw/incubator-superset@aa95e03eb9305c81118876f05f351709daf4cb98
Fix the segment interval for pulling metadata (#3174) The end of the interval would be on the truncated today date, which means that you will exclude today. If your realtime ingestion job runs shorter than a day, the metadata cannot be pulled from the druid cluster.
[ { "sha": "69f10c75f596be81cb83896a446cc26f25d89f7d", "filename": "superset/connectors/druid/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/aa95e03eb9305c81118876f05f351709daf4cb98/superset%2Fconnectors%2Fdruid%2Fmodels.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/aa95e03eb9305c81118876f05f351709daf4cb98/superset%2Fconnectors%2Fdruid%2Fmodels.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fdruid%2Fmodels.py?ref=aa95e03eb9305c81118876f05f351709daf4cb98", "patch": "@@ -492,7 +492,7 @@ def latest_metadata(self):\n lbound = datetime(1901, 1, 1).isoformat()[:10]\n rbound = datetime(2050, 1, 1).isoformat()[:10]\n if not self.version_higher(self.cluster.druid_version, '0.8.2'):\n- rbound = datetime.now().isoformat()[:10]\n+ rbound = datetime.now().isoformat()\n try:\n segment_metadata = client.segment_metadata(\n datasource=self.datasource_name," } ]
incubator-superset
dfea8df7c9c3d469fcf5605e71c803a9ceb5cc69
a5320a0f3729f0513cf88a548d5fbd1bf59aa506
superset/cli.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -195,7 +195,7 @@ def worker(workers):
 CELERYD_CONCURRENCY=config.get("SUPERSET_CELERY_WORKERS"))
 worker = celery_app.Worker(optimization='fair')
- worker.run()
+ worker.start()
 @manager.option(
worker . run ( )
worker . start ( )
WRONG_FUNCTION_NAME
[["Update", ["identifier:run", 3, 12, 3, 15], "start"]]
xunyuw/incubator-superset@dfea8df7c9c3d469fcf5605e71c803a9ceb5cc69
Fix celery worker (#3278)
[ { "sha": "c5dea446efc6ac96bde3306d94c5592999443433", "filename": "superset/cli.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/dfea8df7c9c3d469fcf5605e71c803a9ceb5cc69/superset%2Fcli.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/dfea8df7c9c3d469fcf5605e71c803a9ceb5cc69/superset%2Fcli.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fcli.py?ref=dfea8df7c9c3d469fcf5605e71c803a9ceb5cc69", "patch": "@@ -195,7 +195,7 @@ def worker(workers):\n CELERYD_CONCURRENCY=config.get(\"SUPERSET_CELERY_WORKERS\"))\n \n worker = celery_app.Worker(optimization='fair')\n- worker.run()\n+ worker.start()\n \n \n @manager.option(" } ]
incubator-superset
c5b1eb7f5ba4a180ab5af824a23fcf79df1957ac
46d60880ebb0c717e16cd81aef7a9ad15a6d966c
setup.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -47,7 +47,7 @@ setup(
 'colorama==0.3.9',
 'cryptography==1.9',
 'flask==0.12.2',
- 'flask-appbuilder==1.9.1',
+ 'flask-appbuilder==1.9.4',
 'flask-cache==0.13.1',
 'flask-migrate==2.0.3',
 'flask-script==2.0.5',
'flask-appbuilder==1.9.1' ,
'flask-appbuilder==1.9.4' ,
CHANGE_STRING_LITERAL
[["Update", ["string:'flask-appbuilder==1.9.1'", 3, 9, 3, 34], "'flask-appbuilder==1.9.4'"]]
xunyuw/incubator-superset@c5b1eb7f5ba4a180ab5af824a23fcf79df1957ac
Bump fab to 1.9.4 (#3364) Important bug/security fixes detailed here: https://github.com/dpgaspar/Flask-AppBuilder/blob/master/docs/versions.rst
[ { "sha": "0133596835741d9d14e0efa70a455be66d964fbc", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/c5b1eb7f5ba4a180ab5af824a23fcf79df1957ac/setup.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/c5b1eb7f5ba4a180ab5af824a23fcf79df1957ac/setup.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/setup.py?ref=c5b1eb7f5ba4a180ab5af824a23fcf79df1957ac", "patch": "@@ -47,7 +47,7 @@ def get_git_sha():\n 'colorama==0.3.9',\n 'cryptography==1.9',\n 'flask==0.12.2',\n- 'flask-appbuilder==1.9.1',\n+ 'flask-appbuilder==1.9.4',\n 'flask-cache==0.13.1',\n 'flask-migrate==2.0.3',\n 'flask-script==2.0.5'," } ]
incubator-superset
3dfdde130a88d3a12a6e084adc07d284ed6090e6
255a36c2806f42ad96538d7a456fe020de708b13
setup.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -68,7 +68,7 @@ setup(
 'simplejson==3.10.0',
 'six==1.10.0',
 'sqlalchemy==1.1.9',
- 'sqlalchemy-utils==0.32.14',
+ 'sqlalchemy-utils==0.32.16',
 'sqlparse==0.2.3',
 'thrift>=0.9.3',
 'thrift-sasl>=0.2.1',
'sqlalchemy-utils==0.32.14' ,
'sqlalchemy-utils==0.32.16' ,
CHANGE_STRING_LITERAL
[["Update", ["string:'sqlalchemy-utils==0.32.14'", 3, 9, 3, 36], "'sqlalchemy-utils==0.32.16'"]]
xunyuw/incubator-superset@3dfdde130a88d3a12a6e084adc07d284ed6090e6
setup: Bump sqlalchemy-utils to 0.32.16 (#3405) Now sqlalchemy-utils will make explicit that we are trying decoding the secret with the wrong key instead of a generic UnicodeDecodeError. Fix #2600
[ { "sha": "94104776eb473a4a757e67b7a78c0c01db23d907", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/3dfdde130a88d3a12a6e084adc07d284ed6090e6/setup.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/3dfdde130a88d3a12a6e084adc07d284ed6090e6/setup.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/setup.py?ref=3dfdde130a88d3a12a6e084adc07d284ed6090e6", "patch": "@@ -68,7 +68,7 @@ def get_git_sha():\n 'simplejson==3.10.0',\n 'six==1.10.0',\n 'sqlalchemy==1.1.9',\n- 'sqlalchemy-utils==0.32.14',\n+ 'sqlalchemy-utils==0.32.16',\n 'sqlparse==0.2.3',\n 'thrift>=0.9.3',\n 'thrift-sasl>=0.2.1'," } ]
incubator-superset
6fe93e18c79eeab470f0297014ad9453552e31aa
c988080990691eceb7a78e6448400a519ba4bb0b
superset/connectors/sqla/models.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -598,7 +598,7 @@ class SqlaTable(Model, BaseDatasource):
 db_dialect = self.database.get_sqla_engine().dialect
 for col in table.columns:
 try:
- datatype = "{}".format(col.type).upper()
+ datatype = "{}".format(col.type.compile(dialect=db_dialect)).upper()
 except Exception as e:
 datatype = "UNKNOWN"
 logging.error(
datatype = "{}" . format ( col . type ) . upper ( )
datatype = "{}" . format ( col . type . compile ( dialect = db_dialect ) ) . upper ( )
ADD_METHOD_CALL
[["Insert", ["argument_list", 3, 39, 3, 49], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 39, 3, 49], ["):)", "T"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["attribute", 3, 40, 3, 48], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:compile", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["keyword_argument", "N3"], 1], ["Move", "N2", ["):)", 3, 48, 3, 49], 2], ["Insert", "N3", ["identifier:dialect", "T"], 0], ["Insert", "N3", ["=:=", "T"], 1], ["Insert", "N3", ["identifier:db_dialect", "T"], 2]]
xunyuw/incubator-superset@6fe93e18c79eeab470f0297014ad9453552e31aa
Getting datatype with its dialect (#3486)
[ { "sha": "1ffae8585cc1193d7375ab310872485ce79d10d5", "filename": "superset/connectors/sqla/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/6fe93e18c79eeab470f0297014ad9453552e31aa/superset%2Fconnectors%2Fsqla%2Fmodels.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/6fe93e18c79eeab470f0297014ad9453552e31aa/superset%2Fconnectors%2Fsqla%2Fmodels.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fsqla%2Fmodels.py?ref=6fe93e18c79eeab470f0297014ad9453552e31aa", "patch": "@@ -598,7 +598,7 @@ def fetch_metadata(self):\n db_dialect = self.database.get_sqla_engine().dialect\n for col in table.columns:\n try:\n- datatype = \"{}\".format(col.type).upper()\n+ datatype = \"{}\".format(col.type.compile(dialect=db_dialect)).upper()\n except Exception as e:\n datatype = \"UNKNOWN\"\n logging.error(" } ]
incubator-superset
b866b33dee5bad568f1533f45b3f36ecca9c05be
8994bdacbd5c76c52dc32cd5d700136a12367694
superset/jinja_context.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -43,7 +43,7 @@ def url_param(param, default=None):
 def current_user_id():
 """The id of the user who is currently logged in"""
- if g.user:
+ if hasattr(g, 'user') and g.user:
 return g.user.id
if g . user : return g . user . id
if hasattr ( g , 'user' ) and g . user : return g . user . id
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 5, 4, 25], ["boolean_operator", "N0"], 1], ["Insert", "N0", ["call", "N1"], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Move", "N0", ["attribute", 3, 8, 3, 14], 2], ["Insert", "N1", ["identifier:hasattr", "T"], 0], ["Insert", "N1", ["argument_list", "N2"], 1], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["identifier:g", "T"], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["string:'user'", "T"], 3], ["Insert", "N2", ["):)", "T"], 4]]
xunyuw/incubator-superset@b866b33dee5bad568f1533f45b3f36ecca9c05be
[bugfix] Template rendering failed: '_AppCtxGlobals' object has no attribute 'user' (#3637) Somehow the nature of `g` in Flask has changed where `g.user` used to be provided outside the web request scope and its not anymore. The fix here should address that.
[ { "sha": "a019ed382807fa574caa1c647f221a5688fcde67", "filename": "superset/jinja_context.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/b866b33dee5bad568f1533f45b3f36ecca9c05be/superset%2Fjinja_context.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/b866b33dee5bad568f1533f45b3f36ecca9c05be/superset%2Fjinja_context.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fjinja_context.py?ref=b866b33dee5bad568f1533f45b3f36ecca9c05be", "patch": "@@ -43,7 +43,7 @@ def url_param(param, default=None):\n \n def current_user_id():\n \"\"\"The id of the user who is currently logged in\"\"\"\n- if g.user:\n+ if hasattr(g, 'user') and g.user:\n return g.user.id\n \n " } ]
incubator-superset
aee84389240c04ca2e369ab65a1b100f77327bb7
a6ba841e57fb014e4f260271158e383d375b74cf
superset/connectors/sqla/models.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -465,7 +465,7 @@ class SqlaTable(Model, BaseDatasource):
 # For backwards compatibility and edge cases
 # where a column data type might have changed
 if isinstance(v, basestring):
- v = v.strip(""").strip(""")
+ v = v.strip("'").strip('"')
 if col_obj.is_num:
 v = utils.string_to_num(v)
v = v . strip ( """).strip(""" )
v = v . strip ( "'" ) . strip ( '"' )
SINGLE_STMT
[["Insert", ["call", 3, 33, 3, 56], ["attribute", "N0"], 0], ["Insert", ["call", 3, 33, 3, 56], ["argument_list", "N1"], 1], ["Insert", "N0", ["call", "N2"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:strip", "T"], 2], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["string:'\"'", "T"], 1], ["Insert", "N1", ["):)", "T"], 2], ["Move", "N2", ["attribute", 3, 33, 3, 40], 0], ["Move", "N2", ["argument_list", 3, 40, 3, 56], 1], ["Update", ["string:\"\"\").strip(\"\"\"", 3, 41, 3, 55], "\"'\""]]
xunyuw/incubator-superset@aee84389240c04ca2e369ab65a1b100f77327bb7
Fixing an issue with stripping filter values (#3869)
[ { "sha": "5c2be6fa6751727e48a8a7296ecee4054935e3e0", "filename": "superset/connectors/sqla/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/aee84389240c04ca2e369ab65a1b100f77327bb7/superset%2Fconnectors%2Fsqla%2Fmodels.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/aee84389240c04ca2e369ab65a1b100f77327bb7/superset%2Fconnectors%2Fsqla%2Fmodels.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fsqla%2Fmodels.py?ref=aee84389240c04ca2e369ab65a1b100f77327bb7", "patch": "@@ -465,7 +465,7 @@ def get_sqla_query( # sqla\n # For backwards compatibility and edge cases\n # where a column data type might have changed\n if isinstance(v, basestring):\n- v = v.strip(\"\"\").strip(\"\"\")\n+ v = v.strip(\"'\").strip('\"')\n if col_obj.is_num:\n v = utils.string_to_num(v)\n " } ]
incubator-superset
d9161fb76a9481fb8cf88812be1b9b8f97a028f3
85b18ff5e7eb372b19f2b1174e08a41cc8d317b0
superset/views/core.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -515,7 +515,7 @@ class DashboardModelView(SupersetModelView, DeleteMixin):  # noqa
 obj.slug = obj.slug.strip() or None
 if obj.slug:
 obj.slug = obj.slug.replace(' ', '-')
- obj.slug = re.sub(r'\W+', '', obj.slug)
+ obj.slug = re.sub(r'[^a-zA-Z0-9\-]+', '', obj.slug)
 if g.user not in obj.owners:
 obj.owners.append(g.user)
 utils.validate_json(obj.json_metadata)
obj . slug = re . sub ( r'\W+' , '' , obj . slug )
obj . slug = re . sub ( r'[^a-zA-Z0-9\-]+' , '' , obj . slug )
CHANGE_STRING_LITERAL
[["Update", ["string:r'\\W+'", 3, 31, 3, 37], "r'[^a-zA-Z0-9\\-]+'"]]
xunyuw/incubator-superset@d9161fb76a9481fb8cf88812be1b9b8f97a028f3
Fix slug function (#3876)
[ { "sha": "17f51518f5e5e108b9115224c1b9ccfd933153b9", "filename": "superset/views/core.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/d9161fb76a9481fb8cf88812be1b9b8f97a028f3/superset%2Fviews%2Fcore.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/d9161fb76a9481fb8cf88812be1b9b8f97a028f3/superset%2Fviews%2Fcore.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviews%2Fcore.py?ref=d9161fb76a9481fb8cf88812be1b9b8f97a028f3", "patch": "@@ -515,7 +515,7 @@ def pre_add(self, obj):\n obj.slug = obj.slug.strip() or None\n if obj.slug:\n obj.slug = obj.slug.replace(' ', '-')\n- obj.slug = re.sub(r'\\W+', '', obj.slug)\n+ obj.slug = re.sub(r'[^a-zA-Z0-9\\-]+', '', obj.slug)\n if g.user not in obj.owners:\n obj.owners.append(g.user)\n utils.validate_json(obj.json_metadata)" } ]
incubator-superset
5ee70b244b2751b98fefcfc18f11c6109ed7cdcd
a26cf001c465d5d91f0bc8451a886f4145d1d1fe
superset/connectors/base/models.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -222,7 +222,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
 num_types = (
 'DOUBLE', 'FLOAT', 'INT', 'BIGINT',
- 'LONG', 'REAL', 'NUMERIC', 'DECIMAL',
+ 'LONG', 'REAL', 'NUMERIC', 'DECIMAL', 'MONEY',
 )
 date_types = ('DATE', 'TIME', 'DATETIME')
 str_types = ('VARCHAR', 'STRING', 'CHAR')
num_types = ( 'DOUBLE' , 'FLOAT' , 'INT' , 'BIGINT' , 'LONG' , 'REAL' , 'NUMERIC' , 'DECIMAL' , )
num_types = ( 'DOUBLE' , 'FLOAT' , 'INT' , 'BIGINT' , 'LONG' , 'REAL' , 'NUMERIC' , 'DECIMAL' , 'MONEY' , )
ADD_ELEMENTS_TO_ITERABLE
[["Move", [",:,", 2, 17, 2, 18], ["tuple", 1, 17, 4, 6], 15], ["Insert", ["tuple", 1, 17, 4, 6], [",:,", "T"], 2], ["Insert", ["tuple", 1, 17, 4, 6], ["string:'MONEY'", "T"], 4], ["Insert", ["tuple", 1, 17, 4, 6], [",:,", "T"], 5], ["Delete", [",:,", 3, 45, 3, 46]]]
xunyuw/incubator-superset@5ee70b244b2751b98fefcfc18f11c6109ed7cdcd
Add type MONEY as numeric type (#3959) fixes https://github.com/apache/incubator-superset/issues/3953
[ { "sha": "9bead749ddd1a122f85d200722a690c4d9d56c35", "filename": "superset/connectors/base/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/5ee70b244b2751b98fefcfc18f11c6109ed7cdcd/superset%2Fconnectors%2Fbase%2Fmodels.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/5ee70b244b2751b98fefcfc18f11c6109ed7cdcd/superset%2Fconnectors%2Fbase%2Fmodels.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fbase%2Fmodels.py?ref=5ee70b244b2751b98fefcfc18f11c6109ed7cdcd", "patch": "@@ -222,7 +222,7 @@ def __repr__(self):\n \n num_types = (\n 'DOUBLE', 'FLOAT', 'INT', 'BIGINT',\n- 'LONG', 'REAL', 'NUMERIC', 'DECIMAL',\n+ 'LONG', 'REAL', 'NUMERIC', 'DECIMAL', 'MONEY',\n )\n date_types = ('DATE', 'TIME', 'DATETIME')\n str_types = ('VARCHAR', 'STRING', 'CHAR')" } ]
incubator-superset
23c834f04e5d2e263556d14af216f44ed37b6dd6
c84211ec449a6fc0e80f1e1b6df54cb4340ca3f7
setup.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -5,7 +5,7 @@ import subprocess
 from setuptools import find_packages, setup
 BASE_DIR = os.path.abspath(os.path.dirname(__file__))
-PACKAGE_DIR = os.path.join(BASE_DIR, 'superset', 'assets')
+PACKAGE_DIR = os.path.join(BASE_DIR, 'superset', 'static', 'assets')
 PACKAGE_FILE = os.path.join(PACKAGE_DIR, 'package.json')
 with open(PACKAGE_FILE) as package_file:
 version_string = json.load(package_file)['version']
PACKAGE_DIR = os . path . join ( BASE_DIR , 'superset' , 'assets' )
PACKAGE_DIR = os . path . join ( BASE_DIR , 'superset' , 'static' , 'assets' )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 27, 3, 59], ["string:'static'", "T"], 5], ["Insert", ["argument_list", 3, 27, 3, 59], [",:,", "T"], 6]]
xunyuw/incubator-superset@23c834f04e5d2e263556d14af216f44ed37b6dd6
Fix the pypi build (#4047)
[ { "sha": "87ce6b430920f91db05d1f95109a8d75a6be60aa", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/23c834f04e5d2e263556d14af216f44ed37b6dd6/setup.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/23c834f04e5d2e263556d14af216f44ed37b6dd6/setup.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/setup.py?ref=23c834f04e5d2e263556d14af216f44ed37b6dd6", "patch": "@@ -5,7 +5,7 @@\n from setuptools import find_packages, setup\n \n BASE_DIR = os.path.abspath(os.path.dirname(__file__))\n-PACKAGE_DIR = os.path.join(BASE_DIR, 'superset', 'assets')\n+PACKAGE_DIR = os.path.join(BASE_DIR, 'superset', 'static', 'assets')\n PACKAGE_FILE = os.path.join(PACKAGE_DIR, 'package.json')\n with open(PACKAGE_FILE) as package_file:\n version_string = json.load(package_file)['version']" } ]
incubator-superset
e79d05fd775e8f44aad4d5af11e8d25eb58599b1
fc85756c20dc9c9ab94eb11adb5a6c5865afa0a5
superset/db_engine_specs.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -1073,7 +1073,7 @@ class OracleEngineSpec(PostgresEngineSpec):
 @classmethod
 def convert_dttm(cls, target_type, dttm):
 return (
- """TO_TIMESTAMP('{}', 'YYYY-MM-DD'T'HH24:MI:SS.ff6')"""
+ """TO_TIMESTAMP('{}', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')"""
 ).format(dttm.isoformat())
return ( """TO_TIMESTAMP('{}', 'YYYY-MM-DD'T'HH24:MI:SS.ff6')""" ) . format ( dttm . isoformat ( ) )
return ( """TO_TIMESTAMP('{}', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')""" ) . format ( dttm . isoformat ( ) )
CHANGE_STRING_LITERAL
[["Update", ["string:\"\"\"TO_TIMESTAMP('{}', 'YYYY-MM-DD'T'HH24:MI:SS.ff6')\"\"\"", 3, 13, 3, 68], "\"\"\"TO_TIMESTAMP('{}', 'YYYY-MM-DD\"T\"HH24:MI:SS.ff6')\"\"\""]]
xunyuw/incubator-superset@e79d05fd775e8f44aad4d5af11e8d25eb58599b1
#4058 Fix Oracle timestamps (Oracle "ORA-00907: missing right parenthesis" error) (#4065)
[ { "sha": "a176a25b3c1fb6f5419080970d814211b5c2ac2b", "filename": "superset/db_engine_specs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/e79d05fd775e8f44aad4d5af11e8d25eb58599b1/superset%2Fdb_engine_specs.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/e79d05fd775e8f44aad4d5af11e8d25eb58599b1/superset%2Fdb_engine_specs.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fdb_engine_specs.py?ref=e79d05fd775e8f44aad4d5af11e8d25eb58599b1", "patch": "@@ -1073,7 +1073,7 @@ class OracleEngineSpec(PostgresEngineSpec):\n @classmethod\n def convert_dttm(cls, target_type, dttm):\n return (\n- \"\"\"TO_TIMESTAMP('{}', 'YYYY-MM-DD'T'HH24:MI:SS.ff6')\"\"\"\n+ \"\"\"TO_TIMESTAMP('{}', 'YYYY-MM-DD\"T\"HH24:MI:SS.ff6')\"\"\"\n ).format(dttm.isoformat())\n \n " } ]
incubator-superset
e498f2fcb6600237aad37f5f90b731f9440ca1ac
f7c55270db9ac757ea8a69596a7a0fc9aff27385
superset/db_engine_specs.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -806,7 +806,7 @@ class HiveEngineSpec(PrestoEngineSpec):
 table_name = form.name.data
 filename = form.csv_file.data.filename
- bucket_path = app.config['CSV_TO_HIVE_UPLOAD_BUCKET']
+ bucket_path = app.config['CSV_TO_HIVE_UPLOAD_S3_BUCKET']
 if not bucket_path:
 logging.info('No upload bucket specified')
bucket_path = app . config [ 'CSV_TO_HIVE_UPLOAD_BUCKET' ]
bucket_path = app . config [ 'CSV_TO_HIVE_UPLOAD_S3_BUCKET' ]
CHANGE_STRING_LITERAL
[["Update", ["string:'CSV_TO_HIVE_UPLOAD_BUCKET'", 3, 34, 3, 61], "'CSV_TO_HIVE_UPLOAD_S3_BUCKET'"]]
xunyuw/incubator-superset@e498f2fcb6600237aad37f5f90b731f9440ca1ac
fix variable name (#4139)
[ { "sha": "e02d477f43090d48e1814d71ca1cd7876fbad3d9", "filename": "superset/db_engine_specs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/e498f2fcb6600237aad37f5f90b731f9440ca1ac/superset%2Fdb_engine_specs.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/e498f2fcb6600237aad37f5f90b731f9440ca1ac/superset%2Fdb_engine_specs.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fdb_engine_specs.py?ref=e498f2fcb6600237aad37f5f90b731f9440ca1ac", "patch": "@@ -806,7 +806,7 @@ def get_column_names(filepath):\n table_name = form.name.data\n filename = form.csv_file.data.filename\n \n- bucket_path = app.config['CSV_TO_HIVE_UPLOAD_BUCKET']\n+ bucket_path = app.config['CSV_TO_HIVE_UPLOAD_S3_BUCKET']\n \n if not bucket_path:\n logging.info('No upload bucket specified')" } ]
incubator-superset
c9e47f0bb3f908bfc0634126f1548de8c0fa0b3c
686023c8ddd58d4d562128c794d48524bc635217
superset/db_engine_specs.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -502,7 +502,7 @@ class PrestoEngineSpec(BaseEngineSpec):
 @classmethod
 def adjust_database_uri(cls, uri, selected_schema=None):
 database = uri.database
- if selected_schema:
+ if selected_schema and database:
 if '/' in database:
 database = database.split('/')[0] + '/' + selected_schema
 else:
if selected_schema : if '/' in database : database = database . split ( '/' ) [ 0 ] + '/' + selected_schema else :
if selected_schema and database : if '/' in database : database = database . split ( '/' ) [ 0 ] + '/' + selected_schema else :
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 9, 6, 18], ["boolean_operator", "N0"], 1], ["Move", "N0", ["identifier:selected_schema", 3, 12, 3, 27], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["identifier:database", "T"], 2]]
xunyuw/incubator-superset@c9e47f0bb3f908bfc0634126f1548de8c0fa0b3c
Check for non-None database before using. (#4162) Some valid sqlalchemy uri's return a URL object with database=None, which causes the following error: ``` 2018-01-05 17:59:47,560:ERROR:root:argument of type 'NoneType' is not iterable Traceback (most recent call last): File "/opt/incubator-superset/superset/sql_lab.py", line 186, in execute_sql user_name=user_name, File "/opt/incubator-superset/superset/utils.py", line 124, in __call__ return self.func(*args, **kwargs) File "/opt/incubator-superset/superset/models/core.py", line 644, in get_sqla_engine url = self.db_engine_spec.adjust_database_uri(url, schema) File "/opt/incubator-superset/superset/db_engine_specs.py", line 505, in adjust_database_uri if '/' in database: TypeError: argument of type 'NoneType' is not iterable ``` This patch corrects that problem.
[ { "sha": "d26f633bbd26684f09cf532a05943ad253eab874", "filename": "superset/db_engine_specs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/c9e47f0bb3f908bfc0634126f1548de8c0fa0b3c/superset%2Fdb_engine_specs.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/c9e47f0bb3f908bfc0634126f1548de8c0fa0b3c/superset%2Fdb_engine_specs.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fdb_engine_specs.py?ref=c9e47f0bb3f908bfc0634126f1548de8c0fa0b3c", "patch": "@@ -502,7 +502,7 @@ def patch(cls):\n @classmethod\n def adjust_database_uri(cls, uri, selected_schema=None):\n database = uri.database\n- if selected_schema:\n+ if selected_schema and database:\n if '/' in database:\n database = database.split('/')[0] + '/' + selected_schema\n else:" } ]
incubator-superset
b159e517876a9313cce07caf0605bed4df1fea58
d57012067bccec95f3720dd290fd04c0457d6da8
superset/connectors/sqla/models.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -706,7 +706,7 @@ class SqlaTable(Model, BaseDatasource):
 if not any_date_col and dbcol.is_time:
 any_date_col = col.name
- quoted = str(col.compile(dialect=db_dialect))
+ quoted = col.name
 if dbcol.sum:
 metrics.append(M(
 metric_name='sum__' + dbcol.column_name,
quoted = str ( col . compile ( dialect = db_dialect ) )
quoted = col . name
SINGLE_STMT
[["Move", ["assignment", 3, 13, 3, 58], ["attribute", 3, 26, 3, 37], 2], ["Update", ["identifier:compile", 3, 30, 3, 37], "name"], ["Delete", ["identifier:str", 3, 22, 3, 25]], ["Delete", ["(:(", 3, 25, 3, 26]], ["Delete", ["(:(", 3, 37, 3, 38]], ["Delete", ["identifier:dialect", 3, 38, 3, 45]], ["Delete", ["=:=", 3, 45, 3, 46]], ["Delete", ["identifier:db_dialect", 3, 46, 3, 56]], ["Delete", ["keyword_argument", 3, 38, 3, 56]], ["Delete", ["):)", 3, 56, 3, 57]], ["Delete", ["argument_list", 3, 37, 3, 57]], ["Delete", ["call", 3, 26, 3, 57]], ["Delete", ["):)", 3, 57, 3, 58]], ["Delete", ["argument_list", 3, 25, 3, 58]], ["Delete", ["call", 3, 22, 3, 58]]]
xunyuw/incubator-superset@b159e517876a9313cce07caf0605bed4df1fea58
Don't use fully qualified column names in metric definitions (#4101) When generating an auto SUM() metric on a column, Superset currently will go `SUM(table_name.column_name)`. This is an issue when moving to point to another table. It's common to work on some temporary table or work table and eventually need to point Superset to an alternate table.
[ { "sha": "8ac6e8289a342e5585bdbc4c737084cbff6189e7", "filename": "superset/connectors/sqla/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/b159e517876a9313cce07caf0605bed4df1fea58/superset%2Fconnectors%2Fsqla%2Fmodels.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/b159e517876a9313cce07caf0605bed4df1fea58/superset%2Fconnectors%2Fsqla%2Fmodels.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fsqla%2Fmodels.py?ref=b159e517876a9313cce07caf0605bed4df1fea58", "patch": "@@ -706,7 +706,7 @@ def fetch_metadata(self):\n if not any_date_col and dbcol.is_time:\n any_date_col = col.name\n \n- quoted = str(col.compile(dialect=db_dialect))\n+ quoted = col.name\n if dbcol.sum:\n metrics.append(M(\n metric_name='sum__' + dbcol.column_name," } ]
incubator-superset
04680e5ff138f7113a3d655133307049bc91ff3d
a7a6678d5ca535e29e6e021b7404c2e5c3599fdb
superset/viz.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -946,7 +946,7 @@ class NVD3TimeSeriesViz(NVD3Viz):
 if isinstance(series_title, string_types):
 series_title += title_suffix
 elif title_suffix and isinstance(series_title, (list, tuple)):
- series_title = series_title + (title_suffix,)
+ series_title = text_type(series_title[-1]) + title_suffix
 values = []
 for ds in df.index:
series_title = series_title + ( title_suffix , )
series_title = text_type ( series_title [ - 1 ] ) + title_suffix
SINGLE_STMT
[["Insert", ["binary_operator", 3, 32, 3, 62], ["call", "N0"], 0], ["Move", ["binary_operator", 3, 32, 3, 62], ["identifier:title_suffix", 3, 48, 3, 60], 3], ["Insert", "N0", ["identifier:text_type", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Move", "N1", ["(:(", 3, 47, 3, 48], 0], ["Insert", "N1", ["subscript", "N2"], 1], ["Move", "N1", ["):)", 3, 61, 3, 62], 2], ["Move", "N2", ["identifier:series_title", 3, 32, 3, 44], 0], ["Insert", "N2", ["[:[", "T"], 1], ["Insert", "N2", ["unary_operator", "N3"], 2], ["Insert", "N2", ["]:]", "T"], 3], ["Insert", "N3", ["-:-", "T"], 0], ["Insert", "N3", ["integer:1", "T"], 1], ["Delete", [",:,", 3, 60, 3, 61]], ["Delete", ["tuple", 3, 47, 3, 62]]]
xunyuw/incubator-superset@04680e5ff138f7113a3d655133307049bc91ff3d
[line chart] fix time shift color (#4202)
[ { "sha": "e89dca921f13fcc2966bfe46abc897e9d650e317", "filename": "superset/viz.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/04680e5ff138f7113a3d655133307049bc91ff3d/superset%2Fviz.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/04680e5ff138f7113a3d655133307049bc91ff3d/superset%2Fviz.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviz.py?ref=04680e5ff138f7113a3d655133307049bc91ff3d", "patch": "@@ -946,7 +946,7 @@ def to_series(self, df, classed='', title_suffix=''):\n if isinstance(series_title, string_types):\n series_title += title_suffix\n elif title_suffix and isinstance(series_title, (list, tuple)):\n- series_title = series_title + (title_suffix,)\n+ series_title = text_type(series_title[-1]) + title_suffix\n \n values = []\n for ds in df.index:" } ]
incubator-superset
8175e19f7225dfd9bf70e55f04d816a7639509e2
7b7635618238ea57bb8e2d503b009fa694970855
superset/viz.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -289,7 +289,7 @@ class BaseViz(object):
 self.status != utils.QueryStatus.FAILED):
 cached_dttm = datetime.utcnow().isoformat().split('.')[0]
 try:
- cache_value = json.dumps({
+ cache_value = self.json_dumps({
 'data': data,
 'dttm': cached_dttm,
 })
cache_value = json . dumps ( { 'data' : data , 'dttm' : cached_dttm , } )
cache_value = self . json_dumps ( { 'data' : data , 'dttm' : cached_dttm , } )
SINGLE_STMT
[["Update", ["identifier:json", 3, 35, 3, 39], "self"], ["Update", ["identifier:dumps", 3, 40, 3, 45], "json_dumps"]]
xunyuw/incubator-superset@8175e19f7225dfd9bf70e55f04d816a7639509e2
[cache] Fixing json.dumps for timestamp (#4240)
[ { "sha": "bb0bcf604de7c4f30f9025b7658e8d73ccd41007", "filename": "superset/viz.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/8175e19f7225dfd9bf70e55f04d816a7639509e2/superset%2Fviz.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/8175e19f7225dfd9bf70e55f04d816a7639509e2/superset%2Fviz.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviz.py?ref=8175e19f7225dfd9bf70e55f04d816a7639509e2", "patch": "@@ -289,7 +289,7 @@ def get_payload(self, force=False):\n self.status != utils.QueryStatus.FAILED):\n cached_dttm = datetime.utcnow().isoformat().split('.')[0]\n try:\n- cache_value = json.dumps({\n+ cache_value = self.json_dumps({\n 'data': data,\n 'dttm': cached_dttm,\n })" } ]
incubator-superset
4b11f45f72dc3d1957d6db4e0147fcd6cb6c59af
04ae004f4345c0cc2cf3f9bf170488a36a78ef72
superset/models/core.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -639,7 +639,7 @@ class Database(Model, AuditMixinNullable, ImportMixin):
 @utils.memoized(
 watch=('impersonate_user', 'sqlalchemy_uri_decrypted', 'extra'))
- def get_sqla_engine(self, schema=None, nullpool=False, user_name=None):
+ def get_sqla_engine(self, schema=None, nullpool=True, user_name=None):
 extra = self.get_extra()
 url = make_url(self.sqlalchemy_uri_decrypted)
 url = self.db_engine_spec.adjust_database_uri(url, schema)
def get_sqla_engine ( self , schema = None , nullpool = False , user_name = None ) : extra = self . get_extra ( ) url = make_url ( self . sqlalchemy_uri_decrypted ) url = self . db_engine_spec . adjust_database_uri ( url , schema )
def get_sqla_engine ( self , schema = None , nullpool = True , user_name = None ) : extra = self . get_extra ( ) url = make_url ( self . sqlalchemy_uri_decrypted ) url = self . db_engine_spec . adjust_database_uri ( url , schema )
CHANGE_BOOLEAN_LITERAL
[["Insert", ["default_parameter", 3, 44, 3, 58], ["true:True", "T"], 2], ["Delete", ["false:False", 3, 53, 3, 58]]]
xunyuw/incubator-superset@4b11f45f72dc3d1957d6db4e0147fcd6cb6c59af
Using a NullPool for external connections by default (#4251) Currently, even though `get_sqla_engine` calls get memoized, engines are still short lived since they are attached to an models.Database ORM object. All engines created through this method have the scope of a web request. Knowing that the SQLAlchemy objects are short lived means that a related connection pool would also be short lived and mostly useless. I think it's pretty rare that connections get reused within the context of a view or Celery worker task. We've noticed on Redshift that Superset was leaving many connections opened (hundreds). This is probably due to a combination of the current process not garbage collecting connections properly, and perhaps the absence of connection timeout on the redshift side of things. This could also be related to the fact that we experience web requests timeouts (enforced by gunicorn) and that process-killing may not allow SQLAlchemy to clean up connections as they occur (which this PR may not help fixing...) For all these reasons, it seems like the right thing to do to use NullPool for external connection (but not for our connection to the metadata db!). Opening the PR for conversation. Putting this query into our staging today to run some tests.
[ { "sha": "9f26a27d3c9548cb831b90c02de85dc0091cf51e", "filename": "superset/models/core.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/4b11f45f72dc3d1957d6db4e0147fcd6cb6c59af/superset%2Fmodels%2Fcore.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/4b11f45f72dc3d1957d6db4e0147fcd6cb6c59af/superset%2Fmodels%2Fcore.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fmodels%2Fcore.py?ref=4b11f45f72dc3d1957d6db4e0147fcd6cb6c59af", "patch": "@@ -639,7 +639,7 @@ def get_effective_user(self, url, user_name=None):\n \n @utils.memoized(\n watch=('impersonate_user', 'sqlalchemy_uri_decrypted', 'extra'))\n- def get_sqla_engine(self, schema=None, nullpool=False, user_name=None):\n+ def get_sqla_engine(self, schema=None, nullpool=True, user_name=None):\n extra = self.get_extra()\n url = make_url(self.sqlalchemy_uri_decrypted)\n url = self.db_engine_spec.adjust_database_uri(url, schema)" } ]
incubator-superset
ec069676fd2d9d70e61c4e28ad0989ebba815684
33aa976e3d467847d4bc2940417e0e5ec873eb3a
superset/config.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -246,7 +246,7 @@ INTERVAL = 1
 BACKUP_COUNT = 30
 # Set this API key to enable Mapbox visualizations
-MAPBOX_API_KEY = ''
+MAPBOX_API_KEY = os.environ.get('MAPBOX_API_KEY', '')
 # Maximum number of rows returned in the SQL editor
 SQL_MAX_ROW = 1000000
MAPBOX_API_KEY = ''
MAPBOX_API_KEY = os . environ . get ( 'MAPBOX_API_KEY' , '' )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["assignment", 3, 1, 3, 20], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["attribute", "N3"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:get", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["string:'MAPBOX_API_KEY'", "T"], 1], ["Insert", "N2", [",:,", "T"], 2], ["Move", "N2", ["string:''", 3, 18, 3, 20], 3], ["Insert", "N2", ["):)", "T"], 4], ["Insert", "N3", ["identifier:os", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:environ", "T"], 2]]
xunyuw/incubator-superset@ec069676fd2d9d70e61c4e28ad0989ebba815684
Better default for MAPBOX_API_KEY (#4660)
[ { "sha": "0ff4e3b625b9f178c9ee378e9b889868d8251cbc", "filename": "superset/config.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/ec069676fd2d9d70e61c4e28ad0989ebba815684/superset%2Fconfig.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/ec069676fd2d9d70e61c4e28ad0989ebba815684/superset%2Fconfig.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconfig.py?ref=ec069676fd2d9d70e61c4e28ad0989ebba815684", "patch": "@@ -246,7 +246,7 @@\n BACKUP_COUNT = 30\n \n # Set this API key to enable Mapbox visualizations\n-MAPBOX_API_KEY = ''\n+MAPBOX_API_KEY = os.environ.get('MAPBOX_API_KEY', '')\n \n # Maximum number of rows returned in the SQL editor\n SQL_MAX_ROW = 1000000" } ]
incubator-superset
52b925fee80c0d46aeb444326ac499296d682396
336a1064d63a3952ffd8ed69b65d6c3d1f8af3d9
superset/viz.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -1149,7 +1149,7 @@ class NVD3TimeSeriesViz(NVD3Viz):
 if self._extra_chart_data:
 chart_data += self._extra_chart_data
- chart_data = sorted(chart_data, key=lambda x: x['key'])
+ chart_data = sorted(chart_data, key=lambda x: tuple(x['key']))
 return chart_data
chart_data = sorted ( chart_data , key = lambda x : x [ 'key' ] )
chart_data = sorted ( chart_data , key = lambda x : tuple ( x [ 'key' ] ) )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["lambda", 3, 49, 3, 67], ["call", "N0"], 3], ["Insert", "N0", ["identifier:tuple", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["subscript", 3, 59, 3, 67], 1], ["Insert", "N1", ["):)", "T"], 2]]
xunyuw/incubator-superset@52b925fee80c0d46aeb444326ac499296d682396
Fix bug with sorting columns in group by using time shift (#4683)
[ { "sha": "fc874301619c3d489a40cde256b3e2d1051622a4", "filename": "superset/viz.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/52b925fee80c0d46aeb444326ac499296d682396/superset%2Fviz.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/52b925fee80c0d46aeb444326ac499296d682396/superset%2Fviz.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviz.py?ref=52b925fee80c0d46aeb444326ac499296d682396", "patch": "@@ -1149,7 +1149,7 @@ def get_data(self, df):\n \n if self._extra_chart_data:\n chart_data += self._extra_chart_data\n- chart_data = sorted(chart_data, key=lambda x: x['key'])\n+ chart_data = sorted(chart_data, key=lambda x: tuple(x['key']))\n \n return chart_data\n " } ]
incubator-superset
aa4173dc81e4b1fdc945037042d01f01af41a915
8be0bde6831749219e87af09da76404cba3c7a6c
superset/connectors/druid/models.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -1102,7 +1102,7 @@ class DruidDatasource(Model, BaseDatasource):
 inner_from_dttm = inner_from_dttm or from_dttm
 inner_to_dttm = inner_to_dttm or to_dttm
- timezone = from_dttm.tzname() if from_dttm else None
+ timezone = from_dttm.replace(tzinfo=DRUID_TZ).tzname() if from_dttm else None
 query_str = ''
 metrics_dict = {m.metric_name: m for m in self.metrics}
timezone = from_dttm . tzname ( ) if from_dttm else None
timezone = from_dttm . replace ( tzinfo = DRUID_TZ ) . tzname ( ) if from_dttm else None
ADD_METHOD_CALL
[["Insert", ["attribute", 3, 20, 3, 36], ["call", "N0"], 0], ["Insert", ["attribute", 3, 20, 3, 36], [".:.", "T"], 1], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["identifier:from_dttm", 3, 20, 3, 29], 0], ["Move", "N1", [".:.", 3, 29, 3, 30], 1], ["Insert", "N1", ["identifier:replace", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["keyword_argument", "N3"], 1], ["Insert", "N2", ["):)", "T"], 2], ["Insert", "N3", ["identifier:tzinfo", "T"], 0], ["Insert", "N3", ["=:=", "T"], 1], ["Insert", "N3", ["identifier:DRUID_TZ", "T"], 2]]
xunyuw/incubator-superset@aa4173dc81e4b1fdc945037042d01f01af41a915
Pass timezone to Druid Query granularity (#4648) superset appends DRUID_TZ info to intervals but not to granularity which causes one day's data return as 2 days. This fix is also pass DRUID_TZ to granularity.
[ { "sha": "9a8d6584d8554ae0984bc0d4b790be8bc5a4006f", "filename": "superset/connectors/druid/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/aa4173dc81e4b1fdc945037042d01f01af41a915/superset%2Fconnectors%2Fdruid%2Fmodels.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/aa4173dc81e4b1fdc945037042d01f01af41a915/superset%2Fconnectors%2Fdruid%2Fmodels.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fdruid%2Fmodels.py?ref=aa4173dc81e4b1fdc945037042d01f01af41a915", "patch": "@@ -1102,7 +1102,7 @@ def run_query( # noqa / druid\n inner_from_dttm = inner_from_dttm or from_dttm\n inner_to_dttm = inner_to_dttm or to_dttm\n \n- timezone = from_dttm.tzname() if from_dttm else None\n+ timezone = from_dttm.replace(tzinfo=DRUID_TZ).tzname() if from_dttm else None\n \n query_str = ''\n metrics_dict = {m.metric_name: m for m in self.metrics}" } ]
incubator-superset
918399d4e294e86e7b7efd85e8d7c1bcddcfe2eb
e29beba023299efaed29484cb1a17b2064a2096f
superset/connectors/druid/models.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -1318,7 +1318,7 @@ class DruidDatasource(Model, BaseDatasource): for col in groupby_cols: - df[col] = df[col].fillna('<NULL>').astype(str) + df[col] = df[col].fillna('<NULL>').astype('unicode') return df def query(self, query_obj):
df [ col ] = df [ col ] . fillna ( '<NULL>' ) . astype ( str )
df [ col ] = df [ col ] . fillna ( '<NULL>' ) . astype ( 'unicode' )
SINGLE_TOKEN
[["Insert", ["argument_list", 1, 54, 1, 59], ["string:'unicode'", "T"], 1], ["Delete", ["identifier:str", 1, 55, 1, 58]]]
xunyuw/incubator-superset@918399d4e294e86e7b7efd85e8d7c1bcddcfe2eb
[bugfix] handling UTF8 in Druid dimensions (#4943)
[ { "sha": "079648fe733b5987d9f250741e2803868b306ee2", "filename": "superset/connectors/druid/models.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/918399d4e294e86e7b7efd85e8d7c1bcddcfe2eb/superset%2Fconnectors%2Fdruid%2Fmodels.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/918399d4e294e86e7b7efd85e8d7c1bcddcfe2eb/superset%2Fconnectors%2Fdruid%2Fmodels.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fdruid%2Fmodels.py?ref=918399d4e294e86e7b7efd85e8d7c1bcddcfe2eb", "patch": "@@ -1318,7 +1318,7 @@ def homogenize_types(df, groupby_cols):\n str instead of an object.\n \"\"\"\n for col in groupby_cols:\n- df[col] = df[col].fillna('<NULL>').astype(str)\n+ df[col] = df[col].fillna('<NULL>').astype('unicode')\n return df\n \n def query(self, query_obj):" } ]
incubator-superset
2c5200affdae7995a12e8680f0218fb5aa080bc8
7d5195aae344167cfcea344eb0ca959d8539ff23
setup.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -55,7 +55,7 @@ setup( 'celery', 'colorama', 'cryptography', - 'flask', + 'flask<=1.0.0', 'flask-appbuilder', 'flask-caching', 'flask-compress',
'flask' ,
'flask<=1.0.0' ,
CHANGE_STRING_LITERAL
[["Update", ["string:'flask'", 3, 9, 3, 16], "'flask<=1.0.0'"]]
xunyuw/incubator-superset@2c5200affdae7995a12e8680f0218fb5aa080bc8
[deps] force flask<=1.0.0 (#4959) flask 1.0 came out and has backwards incompatible changes. People are reporting that fresh install doesn't work anymore. fixes https://github.com/apache/incubator-superset/issues/4953 We should ship a 0.25.1 with this in
[ { "sha": "204271e5b5f20b64382465aa9e41327c813c21e2", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/2c5200affdae7995a12e8680f0218fb5aa080bc8/setup.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/2c5200affdae7995a12e8680f0218fb5aa080bc8/setup.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/setup.py?ref=2c5200affdae7995a12e8680f0218fb5aa080bc8", "patch": "@@ -55,7 +55,7 @@ def get_git_sha():\n 'celery',\n 'colorama',\n 'cryptography',\n- 'flask',\n+ 'flask<=1.0.0',\n 'flask-appbuilder',\n 'flask-caching',\n 'flask-compress'," } ]
incubator-superset
2ba929ac9e1d9e6ce7120e021e3f62ba769f8f6e
e72c9cded37f7e1b6eaf3c5192249f5d08ce5ce4
setup.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -55,7 +55,7 @@ setup( 'celery', 'colorama', 'cryptography', - 'flask<=1.0.0', + 'flask<1.0.0', 'flask-appbuilder', 'flask-caching', 'flask-compress',
'flask<=1.0.0' ,
'flask<1.0.0' ,
CHANGE_STRING_LITERAL
[["Update", ["string:'flask<=1.0.0'", 3, 9, 3, 23], "'flask<1.0.0'"]]
xunyuw/incubator-superset@2ba929ac9e1d9e6ce7120e021e3f62ba769f8f6e
Fix flask<1.0.0 Made a mistake originally, meant to flask<1.0.0
[ { "sha": "f463f624a8561177434e94a2c82116d7735d1a6a", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/2ba929ac9e1d9e6ce7120e021e3f62ba769f8f6e/setup.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/2ba929ac9e1d9e6ce7120e021e3f62ba769f8f6e/setup.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/setup.py?ref=2ba929ac9e1d9e6ce7120e021e3f62ba769f8f6e", "patch": "@@ -55,7 +55,7 @@ def get_git_sha():\n 'celery',\n 'colorama',\n 'cryptography',\n- 'flask<=1.0.0',\n+ 'flask<1.0.0',\n 'flask-appbuilder',\n 'flask-caching',\n 'flask-compress'," } ]
incubator-superset
b312cdad2f1621e99fb49a18a4e741c249caf6ef
973c661501edd06ff64ef723cea14639744f06f7
superset/viz.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -691,7 +691,7 @@ class PivotTableViz(BaseViz): df = df.pivot_table( index=self.form_data.get('groupby'), columns=self.form_data.get('columns'), - values=self.form_data.get('metrics'), + values=[self.get_metric_label(m) for m in self.form_data.get('metrics')], aggfunc=self.form_data.get('pandas_aggfunc'), margins=self.form_data.get('pivot_margins'), )
df = df . pivot_table ( index = self . form_data . get ( 'groupby' ) , columns = self . form_data . get ( 'columns' ) , values = self . form_data . get ( 'metrics' ) , aggfunc = self . form_data . get ( 'pandas_aggfunc' ) , margins = self . form_data . get ( 'pivot_margins' ) , )
df = df . pivot_table ( index = self . form_data . get ( 'groupby' ) , columns = self . form_data . get ( 'columns' ) , values = [ self . get_metric_label ( m ) for m in self . form_data . get ( 'metrics' ) ] , aggfunc = self . form_data . get ( 'pandas_aggfunc' ) , margins = self . form_data . get ( 'pivot_margins' ) , )
SINGLE_STMT
[["Insert", ["keyword_argument", 3, 13, 3, 49], ["list_comprehension", "N0"], 2], ["Insert", "N0", ["[:[", "T"], 0], ["Insert", "N0", ["call", "N1"], 1], ["Insert", "N0", ["for_in_clause", "N2"], 2], ["Insert", "N0", ["]:]", "T"], 3], ["Insert", "N1", ["attribute", "N3"], 0], ["Insert", "N1", ["argument_list", "N4"], 1], ["Insert", "N2", ["for:for", "T"], 0], ["Insert", "N2", ["identifier:m", "T"], 1], ["Insert", "N2", ["in:in", "T"], 2], ["Move", "N2", ["call", 3, 20, 3, 49], 3], ["Insert", "N3", ["identifier:self", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:get_metric_label", "T"], 2], ["Insert", "N4", ["(:(", "T"], 0], ["Insert", "N4", ["identifier:m", "T"], 1], ["Insert", "N4", ["):)", "T"], 2]]
xunyuw/incubator-superset@b312cdad2f1621e99fb49a18a4e741c249caf6ef
fix metrics type error in pivot table viz (#5025) transfer metrics dict label to list of string
[ { "sha": "5565993ee6fa722f2c34468e8da149de16ee1d54", "filename": "superset/viz.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/b312cdad2f1621e99fb49a18a4e741c249caf6ef/superset%2Fviz.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/b312cdad2f1621e99fb49a18a4e741c249caf6ef/superset%2Fviz.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviz.py?ref=b312cdad2f1621e99fb49a18a4e741c249caf6ef", "patch": "@@ -691,7 +691,7 @@ def get_data(self, df):\n df = df.pivot_table(\n index=self.form_data.get('groupby'),\n columns=self.form_data.get('columns'),\n- values=self.form_data.get('metrics'),\n+ values=[self.get_metric_label(m) for m in self.form_data.get('metrics')],\n aggfunc=self.form_data.get('pandas_aggfunc'),\n margins=self.form_data.get('pivot_margins'),\n )" } ]
incubator-superset
ffd65ce623be3d6ecb787b307843a176ba5a7cae
dc21e0dd789cb23e895de6346ab60890a8790885
setup.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -57,7 +57,7 @@ setup( 'contextlib2', 'cryptography', 'flask<1.0.0', - 'flask-appbuilder', + 'flask-appbuilder==1.10.0', # known db migration with 1.11+ 'flask-caching', 'flask-compress', 'flask-migrate',
'flask-appbuilder' ,
'flask-appbuilder==1.10.0' ,
CHANGE_STRING_LITERAL
[["Update", ["string:'flask-appbuilder'", 3, 9, 3, 27], "'flask-appbuilder==1.10.0'"]]
xunyuw/incubator-superset@ffd65ce623be3d6ecb787b307843a176ba5a7cae
Pin FAB to 1.10.0 (#5133) Related to https://github.com/apache/incubator-superset/issues/5088#issuecomment-394064133
[ { "sha": "d222b8c8bafe791eda874c610ab4b1b855b7156d", "filename": "setup.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/ffd65ce623be3d6ecb787b307843a176ba5a7cae/setup.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/ffd65ce623be3d6ecb787b307843a176ba5a7cae/setup.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/setup.py?ref=ffd65ce623be3d6ecb787b307843a176ba5a7cae", "patch": "@@ -57,7 +57,7 @@ def get_git_sha():\n 'contextlib2',\n 'cryptography',\n 'flask<1.0.0',\n- 'flask-appbuilder',\n+ 'flask-appbuilder==1.10.0', # known db migration with 1.11+\n 'flask-caching',\n 'flask-compress',\n 'flask-migrate'," } ]
incubator-superset
95bb1753ab3a9fef572b40d7b7762fd7b4982374
d6846d96ff08b4a4322075f2735107f2ed4d2490
superset/viz.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -555,7 +555,7 @@ class TableViz(BaseViz): m_name = '%' + m df[m_name] = pd.Series(metric_percents[m], name=m_name) # Remove metrics that are not in the main metrics list - metrics = fd.get('metrics', []) + metrics = fd.get('metrics') or [] metrics = [self.get_metric_label(m) for m in metrics] for m in filter( lambda m: m not in metrics and m in df.columns,
metrics = fd . get ( 'metrics' , [ ] )
metrics = fd . get ( 'metrics' ) or [ ]
SINGLE_STMT
[["Insert", ["assignment", 3, 13, 3, 44], ["boolean_operator", "N0"], 2], ["Insert", "N0", ["call", "N1"], 0], ["Insert", "N0", ["or:or", "T"], 1], ["Move", "N0", ["list", 3, 41, 3, 43], 2], ["Move", "N1", ["attribute", 3, 23, 3, 29], 0], ["Insert", "N1", ["argument_list", "N2"], 1], ["Move", "N2", ["(:(", 3, 29, 3, 30], 0], ["Move", "N2", ["string:'metrics'", 3, 30, 3, 39], 1], ["Insert", "N2", ["):)", "T"], 2], ["Delete", [",:,", 3, 39, 3, 40]], ["Delete", ["):)", 3, 43, 3, 44]], ["Delete", ["argument_list", 3, 29, 3, 44]], ["Delete", ["call", 3, 23, 3, 44]]]
xunyuw/incubator-superset@95bb1753ab3a9fef572b40d7b7762fd7b4982374
fix empty metrics
[ { "sha": "6015f7c1818c846a939ed9a7c6de7a067a585009", "filename": "superset/viz.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/95bb1753ab3a9fef572b40d7b7762fd7b4982374/superset%2Fviz.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/95bb1753ab3a9fef572b40d7b7762fd7b4982374/superset%2Fviz.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviz.py?ref=95bb1753ab3a9fef572b40d7b7762fd7b4982374", "patch": "@@ -555,7 +555,7 @@ def get_data(self, df):\n m_name = '%' + m\n df[m_name] = pd.Series(metric_percents[m], name=m_name)\n # Remove metrics that are not in the main metrics list\n- metrics = fd.get('metrics', [])\n+ metrics = fd.get('metrics') or []\n metrics = [self.get_metric_label(m) for m in metrics]\n for m in filter(\n lambda m: m not in metrics and m in df.columns," } ]
incubator-superset
e1618ddddba50185375353417dd29826172084aa
b344056932e3247d16ccb4f6a3af9e78cf0549e3
superset/viz.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -223,7 +223,7 @@ class BaseViz(object): """Converting metrics to numeric when pandas.read_sql cannot""" for col, dtype in df.dtypes.items(): if dtype.type == np.object_ and col in metrics: - df[col] = pd.to_numeric(df[col]) + df[col] = pd.to_numeric(df[col], errors='coerce') def query_obj(self): """Building a query object"""
df [ col ] = pd . to_numeric ( df [ col ] )
df [ col ] = pd . to_numeric ( df [ col ] , errors = 'coerce' )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 40, 3, 49], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 40, 3, 49], ["keyword_argument", "N0"], 3], ["Insert", "N0", ["identifier:errors", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["string:'coerce'", "T"], 2]]
xunyuw/incubator-superset@e1618ddddba50185375353417dd29826172084aa
Fix edge case around NaN values (#4964)
[ { "sha": "769a1af5643167ee7f2d17cd7f028f43849f0b98", "filename": "superset/viz.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/e1618ddddba50185375353417dd29826172084aa/superset%2Fviz.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/e1618ddddba50185375353417dd29826172084aa/superset%2Fviz.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviz.py?ref=e1618ddddba50185375353417dd29826172084aa", "patch": "@@ -223,7 +223,7 @@ def df_metrics_to_num(df, metrics):\n \"\"\"Converting metrics to numeric when pandas.read_sql cannot\"\"\"\n for col, dtype in df.dtypes.items():\n if dtype.type == np.object_ and col in metrics:\n- df[col] = pd.to_numeric(df[col])\n+ df[col] = pd.to_numeric(df[col], errors='coerce')\n \n def query_obj(self):\n \"\"\"Building a query object\"\"\"" } ]
incubator-superset
ad9103f5ba8028ca3326aaac533f2952fbf598e6
6fee0587eeac2ed1d288455cf8d773d9e3193127
superset/db_engine_specs.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -84,7 +84,7 @@ class BaseEngineSpec(object): @classmethod def epoch_ms_to_dttm(cls): - return cls.epoch_to_dttm().replace('{col}', '({col}/1000.0)') + return cls.epoch_to_dttm().replace('{col}', '({col}/1000.000)') @classmethod def get_datatype(cls, type_code):
return cls . epoch_to_dttm ( ) . replace ( '{col}' , '({col}/1000.0)' )
return cls . epoch_to_dttm ( ) . replace ( '{col}' , '({col}/1000.000)' )
CHANGE_STRING_LITERAL
[["Update", ["string:'({col}/1000.0)'", 3, 53, 3, 69], "'({col}/1000.000)'"]]
xunyuw/incubator-superset@ad9103f5ba8028ca3326aaac533f2952fbf598e6
[Bug fix] Divide by 1000.000 in epoch_ms_to_dttm() to not lose precision in Presto (#5211) * Fix how the annotation layer interpretes the timestamp string without timezone info; use it as UTC * [Bug fix] Fixed/Refactored annotation layer code so that non-timeseries annotations are applied based on the updated chart object after adding all data * [Bug fix] Fixed/Refactored annotation layer code so that non-timeseries annotations are applied based on the updated chart object after adding all data * Fixed indentation * Fix the key string value in case series.key is a string * Fix the key string value in case series.key is a string * [Bug fix] Divide by 1000.000 in epoch_ms_to_dttm() to not lose precision in Presto * [Bug fix] Divide by 1000.000 in epoch_ms_to_dttm() to not lose precision in Presto
[ { "sha": "0441fe6a6c42a5fce0eb645ede6d88493e9e866b", "filename": "superset/db_engine_specs.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/ad9103f5ba8028ca3326aaac533f2952fbf598e6/superset%2Fdb_engine_specs.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/ad9103f5ba8028ca3326aaac533f2952fbf598e6/superset%2Fdb_engine_specs.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fdb_engine_specs.py?ref=ad9103f5ba8028ca3326aaac533f2952fbf598e6", "patch": "@@ -84,7 +84,7 @@ def epoch_to_dttm(cls):\n \n @classmethod\n def epoch_ms_to_dttm(cls):\n- return cls.epoch_to_dttm().replace('{col}', '({col}/1000.0)')\n+ return cls.epoch_to_dttm().replace('{col}', '({col}/1000.000)')\n \n @classmethod\n def get_datatype(cls, type_code):" } ]
incubator-superset
213fb649e6873f575106483eed8e8114c06023d7
cd55998d6319d8c3ce27f0b9a01e0e12f5300473
superset/models/core.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -726,7 +726,7 @@ class Database(Model, AuditMixinNullable, ImportMixin): return self.get_dialect().identifier_preparer.quote def get_df(self, sql, schema): - sqls = [str(s).strip().strip(';') for s in sqlparse.parse(sql)] + sqls = [six.text_type(s).strip().strip(';') for s in sqlparse.parse(sql)] engine = self.get_sqla_engine(schema=schema) def needs_conversion(df_series):
sqls = [ str ( s ) . strip ( ) . strip ( ';' ) for s in sqlparse . parse ( sql ) ]
sqls = [ six . text_type ( s ) . strip ( ) . strip ( ';' ) for s in sqlparse . parse ( sql ) ]
SINGLE_STMT
[["Insert", ["call", 3, 17, 3, 23], ["attribute", "N0"], 0], ["Update", ["identifier:str", 3, 17, 3, 20], "six"], ["Move", "N0", ["identifier:str", 3, 17, 3, 20], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:text_type", "T"], 2]]
xunyuw/incubator-superset@213fb649e6873f575106483eed8e8114c06023d7
Fix 5479 - unicode columns issue. (#5508)
[ { "sha": "60af53d0b2c4448e4f55f5bdc99525d5adda90c3", "filename": "superset/models/core.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/213fb649e6873f575106483eed8e8114c06023d7/superset%2Fmodels%2Fcore.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/213fb649e6873f575106483eed8e8114c06023d7/superset%2Fmodels%2Fcore.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fmodels%2Fcore.py?ref=213fb649e6873f575106483eed8e8114c06023d7", "patch": "@@ -726,7 +726,7 @@ def get_quoter(self):\n return self.get_dialect().identifier_preparer.quote\n \n def get_df(self, sql, schema):\n- sqls = [str(s).strip().strip(';') for s in sqlparse.parse(sql)]\n+ sqls = [six.text_type(s).strip().strip(';') for s in sqlparse.parse(sql)]\n engine = self.get_sqla_engine(schema=schema)\n \n def needs_conversion(df_series):" } ]
incubator-superset
ad469c72adc044b4a2885f07f9234fbef0820f09
39acd9ff7d6e369b3defc4f3b9b22216759855c9
superset/connectors/sqla/views.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -328,7 +328,7 @@ appbuilder.add_link( 'Tables', label=__('Tables'), href='/tablemodelview/list/?_flt_1_is_sqllab_view=y', - icon='fa-upload', + icon='fa-table', category='Sources', category_label=__('Sources'), category_icon='fa-table')
icon = 'fa-upload' ,
icon = 'fa-table' ,
CHANGE_STRING_LITERAL
[["Update", ["string:'fa-upload'", 3, 10, 3, 21], "'fa-table'"]]
xunyuw/incubator-superset@ad469c72adc044b4a2885f07f9234fbef0820f09
Fixed the wrong icon for Source -> Table (#5574)
[ { "sha": "45143616debf9d59166a9ba16e9fbf25016a6d3d", "filename": "superset/connectors/sqla/views.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/ad469c72adc044b4a2885f07f9234fbef0820f09/superset%2Fconnectors%2Fsqla%2Fviews.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/ad469c72adc044b4a2885f07f9234fbef0820f09/superset%2Fconnectors%2Fsqla%2Fviews.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fconnectors%2Fsqla%2Fviews.py?ref=ad469c72adc044b4a2885f07f9234fbef0820f09", "patch": "@@ -328,7 +328,7 @@ def refresh(self, tables):\n 'Tables',\n label=__('Tables'),\n href='/tablemodelview/list/?_flt_1_is_sqllab_view=y',\n- icon='fa-upload',\n+ icon='fa-table',\n category='Sources',\n category_label=__('Sources'),\n category_icon='fa-table')" } ]
incubator-superset
be04c98cd3a55aec9c9dd6d1457de5655ad20b30
2171ffb630873a6cc31173aa5f7971aaf21c0c03
superset/sql_lab.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -165,7 +165,7 @@ def execute_sql( try: engine = database.get_sqla_engine( schema=query.schema, - nullpool=not ctask.request.called_directly, + nullpool=True, user_name=user_name, ) conn = engine.raw_connection()
engine = database . get_sqla_engine ( schema = query . schema , nullpool = not ctask . request . called_directly , user_name = user_name , )
engine = database . get_sqla_engine ( schema = query . schema , nullpool = True , user_name = user_name , )
SINGLE_STMT
[["Insert", ["keyword_argument", 3, 13, 3, 55], ["true:True", "T"], 2], ["Delete", ["not:not", 3, 22, 3, 25]], ["Delete", ["identifier:ctask", 3, 26, 3, 31]], ["Delete", [".:.", 3, 31, 3, 32]], ["Delete", ["identifier:request", 3, 32, 3, 39]], ["Delete", ["attribute", 3, 26, 3, 39]], ["Delete", [".:.", 3, 39, 3, 40]], ["Delete", ["identifier:called_directly", 3, 40, 3, 55]], ["Delete", ["attribute", 3, 26, 3, 55]], ["Delete", ["not_operator", 3, 22, 3, 55]]]
xunyuw/incubator-superset@be04c98cd3a55aec9c9dd6d1457de5655ad20b30
[sql lab] always use NullPool (#5612) I think that the only place where we want db connection pooling would be to talk to the metadata database. SQL Lab should close its connections and never pool them. Given that each Gunicorn worker will create its own pool that can lead to way too many connections opened. closes https://github.com/apache/incubator-superset/issues/4666
[ { "sha": "a659653d37e766fa25db4a2144c6e35c845bddfd", "filename": "superset/sql_lab.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/be04c98cd3a55aec9c9dd6d1457de5655ad20b30/superset%2Fsql_lab.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/be04c98cd3a55aec9c9dd6d1457de5655ad20b30/superset%2Fsql_lab.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fsql_lab.py?ref=be04c98cd3a55aec9c9dd6d1457de5655ad20b30", "patch": "@@ -165,7 +165,7 @@ def handle_error(msg):\n try:\n engine = database.get_sqla_engine(\n schema=query.schema,\n- nullpool=not ctask.request.called_directly,\n+ nullpool=True,\n user_name=user_name,\n )\n conn = engine.raw_connection()" } ]
incubator-superset
97acfc7f17e6ca3350250aee2500f4f1cadade97
cc9324ae8b0b7053a2fc5a5ced88441204100960
superset/utils.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -958,7 +958,7 @@ def convert_legacy_filters_into_adhoc(fd): fd['adhoc_filters'].append(to_adhoc(fd, 'SQL', clause)) if filters in fd: - for filt in fd[filters]: + for filt in filter(lambda x: x is not None, fd[filters]): fd['adhoc_filters'].append(to_adhoc(filt, 'SIMPLE', clause)) for key in ('filters', 'having', 'having_filters', 'where'):
for filt in fd [ filters ] : fd [ 'adhoc_filters' ] . append ( to_adhoc ( filt , 'SIMPLE' , clause ) )
for filt in filter ( lambda x : x is not None , fd [ filters ] ) : fd [ 'adhoc_filters' ] . append ( to_adhoc ( filt , 'SIMPLE' , clause ) )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["for_statement", 3, 17, 4, 81], ["call", "N0"], 3], ["Insert", "N0", ["identifier:filter", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["lambda", "N2"], 1], ["Insert", "N1", [",:,", "T"], 2], ["Move", "N1", ["subscript", 3, 29, 3, 40], 3], ["Insert", "N1", ["):)", "T"], 4], ["Insert", "N2", ["lambda:lambda", "T"], 0], ["Insert", "N2", ["lambda_parameters", "N3"], 1], ["Insert", "N2", [":::", "T"], 2], ["Insert", "N2", ["comparison_operator", "N4"], 3], ["Insert", "N3", ["identifier:x", "T"], 0], ["Insert", "N4", ["identifier:x", "T"], 0], ["Insert", "N4", ["is:is", "T"], 1], ["Insert", "N4", ["not:not", "T"], 2], ["Insert", "N4", ["none:None", "T"], 3]]
xunyuw/incubator-superset@97acfc7f17e6ca3350250aee2500f4f1cadade97
fix multilayer viz backend error (#5649)
[ { "sha": "b1583084ebbc8155e186ec9bb9355f94b0ac5c55", "filename": "superset/utils.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/97acfc7f17e6ca3350250aee2500f4f1cadade97/superset%2Futils.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/97acfc7f17e6ca3350250aee2500f4f1cadade97/superset%2Futils.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Futils.py?ref=97acfc7f17e6ca3350250aee2500f4f1cadade97", "patch": "@@ -958,7 +958,7 @@ def convert_legacy_filters_into_adhoc(fd):\n fd['adhoc_filters'].append(to_adhoc(fd, 'SQL', clause))\n \n if filters in fd:\n- for filt in fd[filters]:\n+ for filt in filter(lambda x: x is not None, fd[filters]):\n fd['adhoc_filters'].append(to_adhoc(filt, 'SIMPLE', clause))\n \n for key in ('filters', 'having', 'having_filters', 'where'):" } ]
incubator-superset
2d23ae1065cd52770ace6cd072aebb74fe195394
683edc377ec80a88c65bdede3e22bd1c7cf917ec
superset/viz.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -203,7 +203,7 @@ class BaseViz(object): # be considered as the default ISO date format # If the datetime format is unix, the parse will use the corresponding # parsing logic. - if not df.empty: + if df is not None and not df.empty: if DTTM_ALIAS in df.columns: if timestamp_format in ('epoch_s', 'epoch_ms'): # Column has already been formatted as a timestamp.
if not df . empty : if DTTM_ALIAS in df . columns : if timestamp_format in ( 'epoch_s' , 'epoch_ms' ) :
if df is not None and not df . empty : if DTTM_ALIAS in df . columns : if timestamp_format in ( 'epoch_s' , 'epoch_ms' ) :
MORE_SPECIFIC_IF
[["Insert", ["if_statement", 3, 9, 6, 72], ["boolean_operator", "N0"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Move", "N0", ["not_operator", 3, 12, 3, 24], 2], ["Insert", "N1", ["identifier:df", "T"], 0], ["Insert", "N1", ["is:is", "T"], 1], ["Insert", "N1", ["not:not", "T"], 2], ["Insert", "N1", ["none:None", "T"], 3]]
xunyuw/incubator-superset@2d23ae1065cd52770ace6cd072aebb74fe195394
[bugfix] df is None breaks df.empty check (#5657)
[ { "sha": "dcd51ed4879695ed35e75ec897dc46c374eb6ba7", "filename": "superset/viz.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/2d23ae1065cd52770ace6cd072aebb74fe195394/superset%2Fviz.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/2d23ae1065cd52770ace6cd072aebb74fe195394/superset%2Fviz.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviz.py?ref=2d23ae1065cd52770ace6cd072aebb74fe195394", "patch": "@@ -203,7 +203,7 @@ def get_df(self, query_obj=None):\n # be considered as the default ISO date format\n # If the datetime format is unix, the parse will use the corresponding\n # parsing logic.\n- if not df.empty:\n+ if df is not None and not df.empty:\n if DTTM_ALIAS in df.columns:\n if timestamp_format in ('epoch_s', 'epoch_ms'):\n # Column has already been formatted as a timestamp." } ]
incubator-superset
ebe585df3d030d3f4ad167c1a93e70ba9cf741e2
80e777823bc4b2f71acec0fd2f0507e82f64dd22
superset/views/base.py
https://github.com/xunyuw/incubator-superset
true
false
true
@@ -45,7 +45,7 @@ def get_error_msg(): def json_error_response(msg=None, status=500, stacktrace=None, payload=None, link=None): if not payload: - payload = {'error': str(msg)} + payload = {'error': '{}'.format(msg)} if stacktrace: payload['stacktrace'] = stacktrace if link:
payload = { 'error' : str ( msg ) }
payload = { 'error' : '{}' . format ( msg ) }
SINGLE_STMT
[["Insert", ["call", 3, 29, 3, 37], ["attribute", "N0"], 0], ["Insert", "N0", ["string:'{}'", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:format", "T"], 2], ["Delete", ["identifier:str", 3, 29, 3, 32]]]
xunyuw/incubator-superset@ebe585df3d030d3f4ad167c1a93e70ba9cf741e2
[bugfix] Encode unicode error message response (#5687) * Fix unicode error message response * Update
[ { "sha": "f24982083f861db2c09b3fef98e3adf1fdb502e2", "filename": "superset/views/base.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/ebe585df3d030d3f4ad167c1a93e70ba9cf741e2/superset%2Fviews%2Fbase.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/ebe585df3d030d3f4ad167c1a93e70ba9cf741e2/superset%2Fviews%2Fbase.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fviews%2Fbase.py?ref=ebe585df3d030d3f4ad167c1a93e70ba9cf741e2", "patch": "@@ -45,7 +45,7 @@ def get_error_msg():\n \n def json_error_response(msg=None, status=500, stacktrace=None, payload=None, link=None):\n if not payload:\n- payload = {'error': str(msg)}\n+ payload = {'error': '{}'.format(msg)}\n if stacktrace:\n payload['stacktrace'] = stacktrace\n if link:" } ]
incubator-superset
09d46a7864a67d3e2ef70fb41a5c43b28d6c14c9
ba329440bcece3f2e7033b8c02bd27460b7dfb11
tests/fixtures/datasource.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -10,7 +10,7 @@ datasource_post = { 'column_formats': {'ratio': '.2%'}, 'description': 'Adding a DESCRip', 'default_endpoint': '', - 'filter_select': True, + 'filter_select_enabled': True, 'name': 'birth_names', 'datasource_name': 'birth_names', 'type': 'table',
'filter_select' : True ,
'filter_select_enabled' : True ,
CHANGE_STRING_LITERAL
[["Update", ["string:'filter_select'", 3, 5, 3, 20], "'filter_select_enabled'"]]
xunyuw/incubator-superset@09d46a7864a67d3e2ef70fb41a5c43b28d6c14c9
Fix tests
[ { "sha": "647166f9ce41837a5415d1cf8467a5486bd05a7a", "filename": "tests/fixtures/datasource.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/09d46a7864a67d3e2ef70fb41a5c43b28d6c14c9/tests%2Ffixtures%2Fdatasource.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/09d46a7864a67d3e2ef70fb41a5c43b28d6c14c9/tests%2Ffixtures%2Fdatasource.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/tests%2Ffixtures%2Fdatasource.py?ref=09d46a7864a67d3e2ef70fb41a5c43b28d6c14c9", "patch": "@@ -10,7 +10,7 @@\n 'column_formats': {'ratio': '.2%'},\n 'description': 'Adding a DESCRip',\n 'default_endpoint': '',\n- 'filter_select': True,\n+ 'filter_select_enabled': True,\n 'name': 'birth_names',\n 'datasource_name': 'birth_names',\n 'type': 'table'," } ]
incubator-superset
df341ffd26987e08b5559b9c5a2a794cd5c6d535
50c701c0e86de08385e97d34b2dcd41e6c5ff5cd
superset/models/helpers.py
https://github.com/xunyuw/incubator-superset
true
false
false
@@ -279,7 +279,7 @@ class AuditMixinNullable(AuditMixin): return Markup( '<span class="no-wrap">{}</span>'.format(self.changed_on)) - @renders('modified') + @renders('changed_on') def modified(self): return humanize.naturaltime(datetime.now() - self.changed_on)
@ renders ( 'modified' ) def modified ( self ) : return humanize . naturaltime ( datetime . now ( ) - self . changed_on )
@ renders ( 'changed_on' ) def modified ( self ) : return humanize . naturaltime ( datetime . now ( ) - self . changed_on )
CHANGE_STRING_LITERAL
[["Update", ["string:'modified'", 3, 14, 3, 24], "'changed_on'"]]
xunyuw/incubator-superset@df341ffd26987e08b5559b9c5a2a794cd5c6d535
Sorting tables by "modified" is broken (#6027) * Fix sorting by modified * Fix column name in decorator
[ { "sha": "25b808c3968bed18de94384fdf5e8d7aaefdf75d", "filename": "superset/models/helpers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/xunyuw/incubator-superset/blob/df341ffd26987e08b5559b9c5a2a794cd5c6d535/superset%2Fmodels%2Fhelpers.py", "raw_url": "https://github.com/xunyuw/incubator-superset/raw/df341ffd26987e08b5559b9c5a2a794cd5c6d535/superset%2Fmodels%2Fhelpers.py", "contents_url": "https://api.github.com/repos/xunyuw/incubator-superset/contents/superset%2Fmodels%2Fhelpers.py?ref=df341ffd26987e08b5559b9c5a2a794cd5c6d535", "patch": "@@ -279,7 +279,7 @@ def changed_on_(self):\n return Markup(\n '<span class=\"no-wrap\">{}</span>'.format(self.changed_on))\n \n- @renders('modified')\n+ @renders('changed_on')\n def modified(self):\n return humanize.naturaltime(datetime.now() - self.changed_on)\n " } ]
telebot
597bf7b6cdcd42f315e490c9d8d19ec4c7cfaf60
12d659d50d208de69610b4f788bbceaa292d03b5
main.py
https://github.com/alexeyantonov/telebot
true
false
true
@@ -133,7 +133,7 @@ class WebhookHandler(webapp2.RequestHandler): if getEnabled(chat_id): try: resp1 = json.load(urllib2.urlopen('http://www.simsimi.com/requestChat?lc=en&ft=1.0&req=' + urllib.quote_plus(text.encode('utf-8')))) - back = resp1.get('res') + back = resp1.get('res').get('msg') except urllib2.HTTPError, err: logging.error(err) back = str(err)
back = resp1 . get ( 'res' )
back = resp1 . get ( 'res' ) . get ( 'msg' )
ADD_METHOD_CALL
[["Insert", ["call", 3, 28, 3, 44], ["attribute", "N0"], 0], ["Insert", ["call", 3, 28, 3, 44], ["argument_list", "N1"], 1], ["Move", "N0", ["call", 3, 28, 3, 44], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:get", "T"], 2], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["string:'msg'", "T"], 1], ["Insert", "N1", ["):)", "T"], 2]]
alexeyantonov/telebot@597bf7b6cdcd42f315e490c9d8d19ec4c7cfaf60
Fixed simsimi response retrival. Now the "res" attribute is a dictionary with a single item named "msg". Changed code to get that after "res"
[ { "sha": "499812f920d9baed1f1504375b79a2748a4aedde", "filename": "main.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/alexeyantonov/telebot/blob/597bf7b6cdcd42f315e490c9d8d19ec4c7cfaf60/main.py", "raw_url": "https://github.com/alexeyantonov/telebot/raw/597bf7b6cdcd42f315e490c9d8d19ec4c7cfaf60/main.py", "contents_url": "https://api.github.com/repos/alexeyantonov/telebot/contents/main.py?ref=597bf7b6cdcd42f315e490c9d8d19ec4c7cfaf60", "patch": "@@ -133,7 +133,7 @@ def reply(msg=None, img=None):\n if getEnabled(chat_id):\n try:\n resp1 = json.load(urllib2.urlopen('http://www.simsimi.com/requestChat?lc=en&ft=1.0&req=' + urllib.quote_plus(text.encode('utf-8'))))\n- back = resp1.get('res')\n+ back = resp1.get('res').get('msg')\n except urllib2.HTTPError, err:\n logging.error(err)\n back = str(err)" } ]
matplotlib
67d39ecbc1104691bfdff090cb3de79f11f7bc53
b8dcc07155f0f78c952eda956761b59bf3bb7d9c
lib/matplotlib/colors.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -323,7 +323,7 @@ def looks_like_color(c): if is_string_like(c): if cnames.has_key(c): return True elif len(c)==1: return True - elif len(s)==7 and c.startswith('#') and len(s)==7: return True + elif len(c)==7 and c.startswith('#') and len(c)==7: return True else: return False elif iterable(c) and len(c)==3: try:
if cnames . has_key ( c ) : return True elif len ( c ) == 1 : return True elif len ( s ) == 7 and c . startswith ( '#' ) and len ( s ) == 7 : return True else : return False
if cnames . has_key ( c ) : return True elif len ( c ) == 1 : return True elif len ( c ) == 7 and c . startswith ( '#' ) and len ( c ) == 7 : return True else : return False
SINGLE_STMT
[["Update", ["identifier:s", 3, 54, 3, 55], "c"], ["Update", ["identifier:s", 3, 18, 3, 19], "c"]]
dsquareindia/matplotlib@67d39ecbc1104691bfdff090cb3de79f11f7bc53
null
null
matplotlib
ca5d67d1fbfc0bf537ee783b1c6b76b29792e554
73480ea5eff913b89bf8f31f577cb605f5fad400
lib/matplotlib/ticker.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -301,7 +301,7 @@ class NewScalarFormatter(Formatter): locs = self.locs ave_loc = average(locs) if ave_loc: # dont want to take log10(0) - ave_oom = math.floor(math.log10(absolute(ave_loc))) + ave_oom = math.floor(math.log10(average(absolute(locs)))) range_oom = math.floor(math.log10(range)) if absolute(ave_oom-range_oom) >= 4: # four sig-figs if ave_loc < 0:
ave_oom = math . floor ( math . log10 ( absolute ( ave_loc ) ) )
ave_oom = math . floor ( math . log10 ( average ( absolute ( locs ) ) ) )
SINGLE_STMT
[["Insert", ["call", 3, 45, 3, 62], ["identifier:average", "T"], 0], ["Insert", ["call", 3, 45, 3, 62], ["argument_list", "N0"], 1], ["Move", "N0", ["(:(", 3, 53, 3, 54], 0], ["Insert", "N0", ["call", "N1"], 1], ["Insert", "N0", ["):)", "T"], 2], ["Move", "N1", ["identifier:absolute", 3, 45, 3, 53], 0], ["Move", "N1", ["argument_list", 3, 53, 3, 62], 1], ["Insert", ["argument_list", 3, 53, 3, 62], ["(:(", "T"], 0], ["Update", ["identifier:ave_loc", 3, 54, 3, 61], "locs"]]
dsquareindia/matplotlib@ca5d67d1fbfc0bf537ee783b1c6b76b29792e554
null
null
matplotlib
6b752ed511f9906233a597aab07bbcff70c1bcaa
19c05526f9239dff1c99b49af8b03616e0fb3dbd
lib/matplotlib/backends/backend_agg.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -267,7 +267,7 @@ class RendererAgg(RendererBase): w,h = h,w x -= w - key = s, size, dpi, rgb, angle, rcParams['font.latex.package'], rcParams['text.tex.engine'] + key = s, size, dpi, rgb, angle, rcParams['font.latex.package'], rcParams['text.tex.engine'], rcParams['font.family'] im = self.texd.get(key) if im is None: Z = self.texmanager.get_rgba(s, size, dpi, rgb)
key = s , size , dpi , rgb , angle , rcParams [ 'font.latex.package' ] , rcParams [ 'text.tex.engine' ]
key = s , size , dpi , rgb , angle , rcParams [ 'font.latex.package' ] , rcParams [ 'text.tex.engine' ] , rcParams [ 'font.family' ]
SINGLE_STMT
[["Insert", ["expression_list", 3, 15, 3, 100], [",:,", "T"], 13], ["Insert", ["expression_list", 3, 15, 3, 100], ["subscript", "N0"], 14], ["Insert", "N0", ["identifier:rcParams", "T"], 0], ["Insert", "N0", ["[:[", "T"], 1], ["Insert", "N0", ["string:'font.family'", "T"], 2], ["Insert", "N0", ["]:]", "T"], 3]]
dsquareindia/matplotlib@6b752ed511f9906233a597aab07bbcff70c1bcaa
null
null
neo-python
a736b00681d154f8b6ab74f098e1115a84901f82
86a62dfd017063f06e08c692c300d7e9c6126701
neo/VM/ScriptBuilder.py
https://github.com/Ranchosm335/neo-python
true
false
true
@@ -46,7 +46,7 @@ class ScriptBuilder(object): return self.WriteUInt16(value, endian) elif value <= 0xFFFFFFFF: - self.WriteByte(0xfd) + self.WriteByte(0xfe) return self.WriteUInt32(value, endian) else:
elif value <= 0xFFFFFFFF : self . WriteByte ( 0xfd )
elif value <= 0xFFFFFFFF : self . WriteByte ( 0xfe )
CHANGE_NUMERIC_LITERAL
[["Update", ["integer:0xfd", 3, 28, 3, 32], "0xfe"]]
Ranchosm335/neo-python@a736b00681d154f8b6ab74f098e1115a84901f82
fix WriteVarInt of ScriptBuilder
[ { "sha": "52f95ab344f36ce40ee29679a49e1a178b6db793", "filename": "neo/VM/ScriptBuilder.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/a736b00681d154f8b6ab74f098e1115a84901f82/neo%2FVM%2FScriptBuilder.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/a736b00681d154f8b6ab74f098e1115a84901f82/neo%2FVM%2FScriptBuilder.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FVM%2FScriptBuilder.py?ref=a736b00681d154f8b6ab74f098e1115a84901f82", "patch": "@@ -46,7 +46,7 @@ def WriteVarInt(self, value, endian=\"<\"):\n return self.WriteUInt16(value, endian)\n \n elif value <= 0xFFFFFFFF:\n- self.WriteByte(0xfd)\n+ self.WriteByte(0xfe)\n return self.WriteUInt32(value, endian)\n \n else:" } ]
keras-retinanet
df3e648e563a98c7bbb3defc4bc9018c7bca23e5
19ed03b56e61f0f3ca68d775c66f6c422984a84c
keras_retinanet/models/retinanet.py
https://github.com/tickleliu/keras-retinanet
true
false
true
@@ -167,7 +167,7 @@ def retinanet_boxes(inputs, num_classes, nms=True, name='retinanet-boxes', *args predictions, anchors = model.outputs regression = keras.layers.Lambda(lambda x: x[:, :, :4])(predictions) classification = keras.layers.Lambda(lambda x: x[:, :, 4:4 + num_classes])(predictions) - other = keras.layers.Lambda(lambda x: x[:, :, 4:])(predictions) + other = keras.layers.Lambda(lambda x: x[:, :, 4 + num_classes:])(predictions) # apply predicted regression to anchors boxes = keras_retinanet.layers.RegressBoxes(name='boxes')([anchors, regression])
other = keras . layers . Lambda ( lambda x : x [ : , : , 4 : ] ) ( predictions )
other = keras . layers . Lambda ( lambda x : x [ : , : , 4 + num_classes : ] ) ( predictions )
SINGLE_STMT
[["Insert", ["slice", 3, 60, 3, 62], ["binary_operator", "N0"], 0], ["Move", "N0", ["integer:4", 3, 60, 3, 61], 0], ["Insert", "N0", ["+:+", "T"], 1], ["Insert", "N0", ["identifier:num_classes", "T"], 2]]
tickleliu/keras-retinanet@df3e648e563a98c7bbb3defc4bc9018c7bca23e5
Fix bug in selecting columns for 'others'.
[ { "sha": "3b91b99bea010afb254b8acf953ba0353a29f7d9", "filename": "keras_retinanet/models/retinanet.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/tickleliu/keras-retinanet/blob/df3e648e563a98c7bbb3defc4bc9018c7bca23e5/keras_retinanet%2Fmodels%2Fretinanet.py", "raw_url": "https://github.com/tickleliu/keras-retinanet/raw/df3e648e563a98c7bbb3defc4bc9018c7bca23e5/keras_retinanet%2Fmodels%2Fretinanet.py", "contents_url": "https://api.github.com/repos/tickleliu/keras-retinanet/contents/keras_retinanet%2Fmodels%2Fretinanet.py?ref=df3e648e563a98c7bbb3defc4bc9018c7bca23e5", "patch": "@@ -167,7 +167,7 @@ def retinanet_boxes(inputs, num_classes, nms=True, name='retinanet-boxes', *args\n predictions, anchors = model.outputs\n regression = keras.layers.Lambda(lambda x: x[:, :, :4])(predictions)\n classification = keras.layers.Lambda(lambda x: x[:, :, 4:4 + num_classes])(predictions)\n- other = keras.layers.Lambda(lambda x: x[:, :, 4:])(predictions)\n+ other = keras.layers.Lambda(lambda x: x[:, :, 4 + num_classes:])(predictions)\n \n # apply predicted regression to anchors\n boxes = keras_retinanet.layers.RegressBoxes(name='boxes')([anchors, regression])" } ]
keras-retinanet
30d45dcb5b72cf15d5f01d552851dab1956362e8
958a7ebd081380fb31950cc2cbeaa4c4628dfaaa
keras_retinanet/preprocessing/coco.py
https://github.com/tickleliu/keras-retinanet
true
false
true
@@ -74,7 +74,7 @@ class CocoGenerator(keras_retinanet.preprocessing.Generator): return self.label_to_name(self.coco_label_to_label(coco_label)) def label_to_coco_label(self, label): - return self.coco_labels[coco_label] + return self.coco_labels[label] def image_aspect_ratio(self, image_index): image = self.coco.loadImgs(self.image_ids[image_index])[0]
return self . coco_labels [ coco_label ]
return self . coco_labels [ label ]
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:coco_label", 3, 33, 3, 43], "label"]]
tickleliu/keras-retinanet@30d45dcb5b72cf15d5f01d552851dab1956362e8
Fix typo in COCO generator.
[ { "sha": "1d680c44a9245dac125861c77bab938c1c464bb8", "filename": "keras_retinanet/preprocessing/coco.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/tickleliu/keras-retinanet/blob/30d45dcb5b72cf15d5f01d552851dab1956362e8/keras_retinanet%2Fpreprocessing%2Fcoco.py", "raw_url": "https://github.com/tickleliu/keras-retinanet/raw/30d45dcb5b72cf15d5f01d552851dab1956362e8/keras_retinanet%2Fpreprocessing%2Fcoco.py", "contents_url": "https://api.github.com/repos/tickleliu/keras-retinanet/contents/keras_retinanet%2Fpreprocessing%2Fcoco.py?ref=30d45dcb5b72cf15d5f01d552851dab1956362e8", "patch": "@@ -74,7 +74,7 @@ def coco_label_to_name(self, coco_label):\n return self.label_to_name(self.coco_label_to_label(coco_label))\n \n def label_to_coco_label(self, label):\n- return self.coco_labels[coco_label]\n+ return self.coco_labels[label]\n \n def image_aspect_ratio(self, image_index):\n image = self.coco.loadImgs(self.image_ids[image_index])[0]" } ]
keras-retinanet
a0b537210a32a294f9696da3b1a1cb4d9e814438
9d2219d5e5f3e4139333d218bbea06ef92d0d917
keras_retinanet/preprocessing/csv_generator.py
https://github.com/tickleliu/keras-retinanet
true
false
false
@@ -122,7 +122,7 @@ class CSVGenerator(Generator): # csv with img_path, x1, y1, x2, y2, class_name try: with _open_for_csv(csv_data_file) as file: - self.image_data = _read_annotations(csv.reader(file, delimiter=',')) + self.image_data = _read_annotations(csv.reader(file, delimiter=','), self.classes) except ValueError as e: raise_from(ValueError('invalid CSV annotations file: {}: {}'.format(csv_data_file, e)), None) self.image_names = self.image_data.keys()
self . image_data = _read_annotations ( csv . reader ( file , delimiter = ',' ) )
self . image_data = _read_annotations ( csv . reader ( file , delimiter = ',' ) , self . classes )
SAME_FUNCTION_MORE_ARGS
[["Insert", ["argument_list", 3, 52, 3, 85], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 52, 3, 85], ["attribute", "N0"], 3], ["Insert", "N0", ["identifier:self", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:classes", "T"], 2]]
tickleliu/keras-retinanet@a0b537210a32a294f9696da3b1a1cb4d9e814438
Fix missing argument in CSVGenerator.
[ { "sha": "6659fd83e81361c6fb50644e7483c05cc76ebca6", "filename": "keras_retinanet/preprocessing/csv_generator.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/tickleliu/keras-retinanet/blob/a0b537210a32a294f9696da3b1a1cb4d9e814438/keras_retinanet%2Fpreprocessing%2Fcsv_generator.py", "raw_url": "https://github.com/tickleliu/keras-retinanet/raw/a0b537210a32a294f9696da3b1a1cb4d9e814438/keras_retinanet%2Fpreprocessing%2Fcsv_generator.py", "contents_url": "https://api.github.com/repos/tickleliu/keras-retinanet/contents/keras_retinanet%2Fpreprocessing%2Fcsv_generator.py?ref=a0b537210a32a294f9696da3b1a1cb4d9e814438", "patch": "@@ -122,7 +122,7 @@ def __init__(\n # csv with img_path, x1, y1, x2, y2, class_name\n try:\n with _open_for_csv(csv_data_file) as file:\n- self.image_data = _read_annotations(csv.reader(file, delimiter=','))\n+ self.image_data = _read_annotations(csv.reader(file, delimiter=','), self.classes)\n except ValueError as e:\n raise_from(ValueError('invalid CSV annotations file: {}: {}'.format(csv_data_file, e)), None)\n self.image_names = self.image_data.keys()" } ]
keras-retinanet
550448d031270d8b07e20b60246d3a509213476f
74a4e910617aca72538c6e8550907d67776c5895
keras_retinanet/preprocessing/csv_generator.py
https://github.com/tickleliu/keras-retinanet
true
false
false
@@ -125,7 +125,7 @@ class CSVGenerator(Generator): self.image_data = _read_annotations(csv.reader(file, delimiter=','), self.classes) except ValueError as e: raise_from(ValueError('invalid CSV annotations file: {}: {}'.format(csv_data_file, e)), None) - self.image_names = self.image_data.keys() + self.image_names = list(self.image_data.keys()) super(CSVGenerator, self).__init__(*args, **kwargs)
self . image_names = self . image_data . keys ( )
self . image_names = list ( self . image_data . keys ( ) )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["call", 3, 28, 3, 50], ["identifier:list", "T"], 0], ["Insert", ["call", 3, 28, 3, 50], ["argument_list", "N0"], 1], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["call", 3, 28, 3, 50], 1], ["Insert", "N0", ["):)", "T"], 2]]
tickleliu/keras-retinanet@550448d031270d8b07e20b60246d3a509213476f
Fix CSVGenerator.image_names.
[ { "sha": "d8e733d627821c84c59e64652b665f4278e51e58", "filename": "keras_retinanet/preprocessing/csv_generator.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/tickleliu/keras-retinanet/blob/550448d031270d8b07e20b60246d3a509213476f/keras_retinanet%2Fpreprocessing%2Fcsv_generator.py", "raw_url": "https://github.com/tickleliu/keras-retinanet/raw/550448d031270d8b07e20b60246d3a509213476f/keras_retinanet%2Fpreprocessing%2Fcsv_generator.py", "contents_url": "https://api.github.com/repos/tickleliu/keras-retinanet/contents/keras_retinanet%2Fpreprocessing%2Fcsv_generator.py?ref=550448d031270d8b07e20b60246d3a509213476f", "patch": "@@ -125,7 +125,7 @@ def __init__(\n self.image_data = _read_annotations(csv.reader(file, delimiter=','), self.classes)\n except ValueError as e:\n raise_from(ValueError('invalid CSV annotations file: {}: {}'.format(csv_data_file, e)), None)\n- self.image_names = self.image_data.keys()\n+ self.image_names = list(self.image_data.keys())\n \n super(CSVGenerator, self).__init__(*args, **kwargs)\n " } ]
GW2UtilityBot
6100f663817a3a97c0f681203b3f3962fc9d19d7
283818842994f12a2463323ff0253bb6a7b713e6
src/dailies.py
https://github.com/julianviso/GW2UtilityBot
true
false
true
@@ -56,7 +56,7 @@ class Dailies(): @commands.command() async def tomorrowsPVE(self): - tomorrowsDailies = urlopen("https://api.guildwars2.com/v2/achivements/daily/tomorrow") + tomorrowsDailies = urlopen("https://api.guildwars2.com/v2/achievements/daily/tomorrow") data = json.load(tomorrowsDailies) results = [] for pveData in data['pve']:
tomorrowsDailies = urlopen ( "https://api.guildwars2.com/v2/achivements/daily/tomorrow" )
tomorrowsDailies = urlopen ( "https://api.guildwars2.com/v2/achievements/daily/tomorrow" )
CHANGE_STRING_LITERAL
[["Update", ["string:\"https://api.guildwars2.com/v2/achivements/daily/tomorrow\"", 3, 36, 3, 94], "\"https://api.guildwars2.com/v2/achievements/daily/tomorrow\""]]
julianviso/GW2UtilityBot@6100f663817a3a97c0f681203b3f3962fc9d19d7
Fixing bug with tomorrows pve dailies
[ { "sha": "beee90349275de98899027d60f1f5df7607dbe9c", "filename": "src/dailies.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/julianviso/GW2UtilityBot/blob/6100f663817a3a97c0f681203b3f3962fc9d19d7/src%2Fdailies.py", "raw_url": "https://github.com/julianviso/GW2UtilityBot/raw/6100f663817a3a97c0f681203b3f3962fc9d19d7/src%2Fdailies.py", "contents_url": "https://api.github.com/repos/julianviso/GW2UtilityBot/contents/src%2Fdailies.py?ref=6100f663817a3a97c0f681203b3f3962fc9d19d7", "patch": "@@ -56,7 +56,7 @@ async def dailyPVE(self):\n \n @commands.command()\n async def tomorrowsPVE(self):\n- tomorrowsDailies = urlopen(\"https://api.guildwars2.com/v2/achivements/daily/tomorrow\")\n+ tomorrowsDailies = urlopen(\"https://api.guildwars2.com/v2/achievements/daily/tomorrow\")\n data = json.load(tomorrowsDailies)\n results = []\n for pveData in data['pve']:" } ]
matplotlib
abe527d53c3532782f3a5bb4ae95e20608e2d28c
21e030fbfae33fb4429aafc880514f039016f69f
lib/matplotlib/_mathtext_data.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -2439,7 +2439,7 @@ stix_virtual_fonts = { (0x0030, 0x0039, 'rm', 0x1d7e2), # 0-9 (0x0041, 0x005a, 'it', 0x1d608), # A-Z (0x0061, 0x007a, 'it', 0x1d622), # a-z - (0x0391, 0x03a9, 'it', 0xe1bf), # \Alpha-\Omega + (0x0391, 0x03a9, 'rm', 0xe17d), # \Alpha-\Omega (0x03b1, 0x03c9, 'it', 0xe1d8), # \alpha-\omega (0x03d1, 0x03d1, 'it', 0xe1f2), # theta variant (0x03d5, 0x03d5, 'it', 0xe1f3), # phi variant
( 0x0391 , 0x03a9 , 'it' , 0xe1bf ) ,
( 0x0391 , 0x03a9 , 'rm' , 0xe17d ) ,
SINGLE_STMT
[["Update", ["string:'it'", 3, 30, 3, 34], "'rm'"], ["Update", ["integer:0xe1bf", 3, 36, 3, 42], "0xe17d"]]
dsquareindia/matplotlib@abe527d53c3532782f3a5bb4ae95e20608e2d28c
null
null
matplotlib
2f91670514bcbd35103dcd441ce1ec2f80412611
2a9e66e7e1dfe21d008fa0527eb8945fbe3441d7
lib/matplotlib/axes.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -4728,7 +4728,7 @@ class Axes(martist.Artist): X = X.ravel() Y = Y.ravel() - coords = npy.zeros(((Nx * Ny), 2), X.dtype) + coords = npy.zeros(((Nx * Ny), 2), dtype=float) coords[:, 0] = X coords[:, 1] = Y
coords = npy . zeros ( ( ( Nx * Ny ) , 2 ) , X . dtype )
coords = npy . zeros ( ( ( Nx * Ny ) , 2 ) , dtype = float )
SINGLE_STMT
[["Insert", ["argument_list", 3, 27, 3, 52], ["keyword_argument", "N0"], 3], ["Update", ["identifier:X", 3, 44, 3, 45], "dtype"], ["Move", "N0", ["identifier:X", 3, 44, 3, 45], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Update", ["identifier:dtype", 3, 46, 3, 51], "float"], ["Move", "N0", ["identifier:dtype", 3, 46, 3, 51], 2], ["Delete", [".:.", 3, 45, 3, 46]], ["Delete", ["attribute", 3, 44, 3, 51]]]
dsquareindia/matplotlib@2f91670514bcbd35103dcd441ce1ec2f80412611
null
null
matplotlib
dbe0f68fa90abe83f8ce02fac9c7f71784477a8c
fbc517c2e70afe0fa1e679757b93fec71f4f3dd6
examples/mathtext_demo.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -22,7 +22,7 @@ ax.text(1, 1.6, tex, fontsize=20, va='bottom') ax.legend(("Foo", "Testing $x^2$")) -ax.set_title(r'$\Delta_i^j \hspace{0.4} \rm{versus} \hspace{0.4} \Delta_{i+1}^j$', fontsize=20) +ax.set_title(r'$\Delta_i^j \hspace{0.4} \mathrm{versus} \hspace{0.4} \Delta_{i+1}^j$', fontsize=20) #fig.savefig('mathtext_demo') show()
ax . set_title ( r'$\Delta_i^j \hspace{0.4} \rm{versus} \hspace{0.4} \Delta_{i+1}^j$' , fontsize = 20 )
ax . set_title ( r'$\Delta_i^j \hspace{0.4} \mathrm{versus} \hspace{0.4} \Delta_{i+1}^j$' , fontsize = 20 )
CHANGE_STRING_LITERAL
[["Update", ["string:r'$\\Delta_i^j \\hspace{0.4} \\rm{versus} \\hspace{0.4} \\Delta_{i+1}^j$'", 3, 14, 3, 82], "r'$\\Delta_i^j \\hspace{0.4} \\mathrm{versus} \\hspace{0.4} \\Delta_{i+1}^j$'"]]
dsquareindia/matplotlib@dbe0f68fa90abe83f8ce02fac9c7f71784477a8c
null
null
matplotlib
28f5328760bfc606a7e9b4dfd707896ed9118bb0
5c7a23b25cee675de9089623be87c8dc3fed26dd
lib/matplotlib/__init__.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -397,7 +397,7 @@ def _get_configdir(): if os.path.exists(p): if not _is_writable_dir(p): - raise RuntimeError("'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored "%h) + raise RuntimeError("'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored "% (h, h)) else: if not _is_writable_dir(h): raise RuntimeError("Failed to create %s/.matplotlib; consider setting MPLCONFIGDIR to a writable directory for matplotlib configuration data"%h)
raise RuntimeError ( "'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored " % h )
raise RuntimeError ( "'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored " % ( h , h ) )
CHANGE_BINARY_OPERAND
[["Insert", ["binary_operator", 3, 32, 3, 230], ["tuple", "N0"], 2], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["identifier:h", 3, 229, 3, 230], 1], ["Insert", "N0", [",:,", "T"], 2], ["Insert", "N0", ["identifier:h", "T"], 3], ["Insert", "N0", ["):)", "T"], 4]]
dsquareindia/matplotlib@28f5328760bfc606a7e9b4dfd707896ed9118bb0
null
null
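Note on the record above: the error message carries two %s placeholders, so interpolating a single value raises "not enough arguments for format string"; a 2-tuple fixes it. A shortened sketch of the same message:

h = "/home/user"
msg = ("'%s' is not a writable dir; you must set %s/.matplotlib "
       "to be a writable dir.")

try:
    print(msg % h)                    # two placeholders, one value
except TypeError as exc:
    print(exc)                        # not enough arguments for format string

print(msg % (h, h))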
matplotlib
6fc549b41f89e8d32d53b59960e5b8fe0c40f929
78ea770ab3d1380ce248d7aacfce5ac8c14a9e4a
lib/matplotlib/__init__.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -397,7 +397,7 @@ def _get_configdir(): if os.path.exists(p): if not _is_writable_dir(p): - raise RuntimeError("'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored "%h) + raise RuntimeError("'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored "% (h, h)) else: if not _is_writable_dir(h): raise RuntimeError("Failed to create %s/.matplotlib; consider setting MPLCONFIGDIR to a writable directory for matplotlib configuration data"%h)
raise RuntimeError ( "'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored " % h )
raise RuntimeError ( "'%s' is not a writable dir; you must set %s/.matplotlib to be a writable dir. You can also set environment variable MPLCONFIGDIR to any writable directory where you want matplotlib data stored " % ( h , h ) )
CHANGE_BINARY_OPERAND
[["Insert", ["binary_operator", 3, 32, 3, 230], ["tuple", "N0"], 2], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["identifier:h", 3, 229, 3, 230], 1], ["Insert", "N0", [",:,", "T"], 2], ["Insert", "N0", ["identifier:h", "T"], 3], ["Insert", "N0", ["):)", "T"], 4]]
dsquareindia/matplotlib@6fc549b41f89e8d32d53b59960e5b8fe0c40f929
null
null
matplotlib
1e7da2cddb17511959b2c0ad76f1060ea155af9a
280fd1244d0ca6e779597bf5c4261e6411479d9a
lib/matplotlib/colorbar.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -351,7 +351,7 @@ class ColorbarBase(cm.ScalarMappable): b = npy.array(locator()) b, ticks = self._locate(b) formatter.set_locs(b) - ticklabels = [formatter(t) for t in b] + ticklabels = [formatter(t, i) for i, t in enumerate(b)] offset_string = formatter.get_offset() return ticks, ticklabels, offset_string
ticklabels = [ formatter ( t ) for t in b ]
ticklabels = [ formatter ( t , i ) for i , t in enumerate ( b ) ]
SINGLE_STMT
[["Insert", ["for_in_clause", 3, 36, 3, 46], ["pattern_list", "N0"], 1], ["Insert", ["for_in_clause", 3, 36, 3, 46], ["call", "N1"], 4], ["Insert", ["argument_list", 3, 32, 3, 35], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 32, 3, 35], ["identifier:i", "T"], 3], ["Insert", "N0", ["identifier:i", "T"], 0], ["Insert", "N0", [",:,", "T"], 1], ["Move", "N0", ["identifier:t", 3, 40, 3, 41], 2], ["Insert", "N1", ["identifier:enumerate", "T"], 0], ["Insert", "N1", ["argument_list", "N2"], 1], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["identifier:b", 3, 45, 3, 46], 1], ["Insert", "N2", ["):)", "T"], 2]]
dsquareindia/matplotlib@1e7da2cddb17511959b2c0ad76f1060ea155af9a
null
null
matplotlib
579f9cdf2207b7b92e7435a45150af606831c4a4
2881611af8b5e4007eb1fb7058b09af67d25b520
lib/matplotlib/image.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -648,6 +648,6 @@ def pil_to_array( pilImage ): raise RuntimeError('Unknown image mode') x_str = im.tostring('raw',im.mode,0,-1) - x = np.fromstring(x_str,np.uint8) + x = npy.fromstring(x_str,npy.uint8) x.shape = im.size[1], im.size[0], 4 return x
x = np . fromstring ( x_str , np . uint8 )
x = npy . fromstring ( x_str , npy . uint8 )
SINGLE_STMT
[["Update", ["identifier:np", 3, 9, 3, 11], "npy"], ["Update", ["identifier:np", 3, 29, 3, 31], "npy"]]
dsquareindia/matplotlib@579f9cdf2207b7b92e7435a45150af606831c4a4
null
null
matplotlib
4374e848de4c54372a934b1c389da0be76598a6a
f24d06c35c6844656a35e61d9dbaa25ecf540543
lib/matplotlib/axes.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -1234,7 +1234,7 @@ class Axes(martist.Artist): # the bins, counts and patches lined up, but it throws off log # scaling. We'll ignore rects with zero height or width in # the auto-scaling - if isinstance(p, mpatches.Rectangle) and p.get_width()==0. or p.get_height()==0.: + if isinstance(p, mpatches.Rectangle) and p.get_width()==0. and p.get_height()==0.: return vertices = p.get_patch_transform().transform(p.get_path().vertices)
if isinstance ( p , mpatches . Rectangle ) and p . get_width ( ) == 0. or p . get_height ( ) == 0. : return
if isinstance ( p , mpatches . Rectangle ) and p . get_width ( ) == 0. and p . get_height ( ) == 0. : return
CHANGE_BINARY_OPERATOR
[["Insert", ["boolean_operator", 3, 12, 3, 89], ["and:and", "T"], 1], ["Delete", ["or:or", 3, 68, 3, 70]]]
dsquareindia/matplotlib@4374e848de4c54372a934b1c389da0be76598a6a
null
null
matplotlib
f87b2b0f3639ced69e158a032b7fba39d4654951
82959ca3404b5313f8096c713a56583946775f82
lib/matplotlib/axes.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -1234,7 +1234,7 @@ class Axes(martist.Artist): # the bins, counts and patches lined up, but it throws off log # scaling. We'll ignore rects with zero height or width in # the auto-scaling - if isinstance(p, mpatches.Rectangle) and p.get_width()==0. and p.get_height()==0.: + if isinstance(p, mpatches.Rectangle) and (p.get_width()==0. or p.get_height()==0.): return vertices = p.get_patch_transform().transform(p.get_path().vertices)
if isinstance ( p , mpatches . Rectangle ) and p . get_width ( ) == 0. and p . get_height ( ) == 0. : return
if isinstance ( p , mpatches . Rectangle ) and ( p . get_width ( ) == 0. or p . get_height ( ) == 0. ) : return
SINGLE_STMT
[["Insert", ["boolean_operator", 3, 12, 3, 90], ["parenthesized_expression", "N0"], 2], ["Insert", "N0", ["(:(", "T"], 0], ["Insert", "N0", ["boolean_operator", "N1"], 1], ["Insert", "N0", ["):)", "T"], 2], ["Move", "N1", ["comparison_operator", 3, 50, 3, 67], 0], ["Insert", "N1", ["or:or", "T"], 1], ["Move", "N1", ["comparison_operator", 3, 72, 3, 90], 2], ["Delete", ["and:and", 3, 68, 3, 71]]]
dsquareindia/matplotlib@f87b2b0f3639ced69e158a032b7fba39d4654951
null
null
matplotlib
4695c538c017756d85c5efe767ad46233787fd38
f42da8a909d00343f99906e270dd9d64b27cb10e
lib/matplotlib/mathtext.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -656,7 +656,7 @@ class TruetypeFonts(Fonts): def get_underline_thickness(self, font, fontsize, dpi): cached_font = self._get_font(font) - return cached_font.font.underline_thickness / 64.0 / fontsize * (10.0 * dpi / 100.0) + return (cached_font.font.underline_thickness / 64.0 / fontsize) * (dpi) def get_kern(self, font1, fontclass1, sym1, fontsize1, font2, fontclass2, sym2, fontsize2, dpi):
return cached_font . font . underline_thickness / 64.0 / fontsize * ( 10.0 * dpi / 100.0 )
return ( cached_font . font . underline_thickness / 64.0 / fontsize ) * ( dpi )
SINGLE_STMT
[["Insert", ["binary_operator", 3, 16, 3, 93], ["parenthesized_expression", "N0"], 0], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["binary_operator", 3, 16, 3, 70], 1], ["Insert", "N0", ["):)", "T"], 2], ["Move", ["parenthesized_expression", 3, 73, 3, 93], ["identifier:dpi", 3, 81, 3, 84], 1], ["Delete", ["float:10.0", 3, 74, 3, 78]], ["Delete", ["*:*", 3, 79, 3, 80]], ["Delete", ["binary_operator", 3, 74, 3, 84]], ["Delete", ["/:/", 3, 85, 3, 86]], ["Delete", ["float:100.0", 3, 87, 3, 92]], ["Delete", ["binary_operator", 3, 74, 3, 92]]]
dsquareindia/matplotlib@4695c538c017756d85c5efe767ad46233787fd38
null
null
matplotlib
f7656d47e2e4c019f252cad85ae7efed48bc6c05
7f01a7c778f3ab0e63b1690499d8861c188b5aa4
examples/tests/backend_driver.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -22,7 +22,7 @@ import matplotlib.backends as mplbe all_backends = [b.lower() for b in mplbe.all_backends] all_backends.extend(['cairo.png', 'cairo.ps', 'cairo.pdf', 'cairo.svg']) -pylab_dir = os.path.join('..', 'pylab') +pylab_dir = os.path.join('..', 'pylab_examples') pylab_files = [ 'alignment_test.py', 'arctest.py',
pylab_dir = os . path . join ( '..' , 'pylab' )
pylab_dir = os . path . join ( '..' , 'pylab_examples' )
CHANGE_STRING_LITERAL
[["Update", ["string:'pylab'", 3, 32, 3, 39], "'pylab_examples'"]]
dsquareindia/matplotlib@f7656d47e2e4c019f252cad85ae7efed48bc6c05
null
null
awx
27948a8dd94d93f12d02223d400de80cfa4da311
3c23e479bfa8fae798adf51162a66ca1e135cbfa
awx/main/models/__init__.py
https://github.com/gconsidine/awx
true
false
true
@@ -735,7 +735,7 @@ class Project(CommonModel): elif not self.get_project_path(): return 'missing' else: - return 'successsful' + return 'successful' elif not self.get_project_path(): return 'missing' else:
else : return 'successsful'
else : return 'successful'
CHANGE_STRING_LITERAL
[["Update", ["string:'successsful'", 3, 24, 3, 37], "'successful'"]]
gconsidine/awx@27948a8dd94d93f12d02223d400de80cfa4da311
AC-431. Fixed misspelled project status.
[ { "sha": "752e0a4dc0b808629c6b17f148c35cd04f7b6f06", "filename": "awx/main/models/__init__.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/27948a8dd94d93f12d02223d400de80cfa4da311/awx%2Fmain%2Fmodels%2F__init__.py", "raw_url": "https://github.com/gconsidine/awx/raw/27948a8dd94d93f12d02223d400de80cfa4da311/awx%2Fmain%2Fmodels%2F__init__.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2F__init__.py?ref=27948a8dd94d93f12d02223d400de80cfa4da311", "patch": "@@ -735,7 +735,7 @@ def status(self):\n elif not self.get_project_path():\n return 'missing'\n else:\n- return 'successsful'\n+ return 'successful'\n elif not self.get_project_path():\n return 'missing'\n else:" } ]
awx
c20300f6ce1c4d0946376366b436b46a543d45df
54e0b814bf6fc978d6c741687443083f6b47317a
awx/main/models/__init__.py
https://github.com/gconsidine/awx
true
false
true
@@ -1608,7 +1608,7 @@ class JobTemplate(CommonModel): for pw in inventory_source.source_passwords_needed: if pw not in needed: needed.append(pw) - return bool(len(needed) == 0) + return bool(self.credential and not len(needed)) class Job(CommonModelNameNotUnique):
return bool ( len ( needed ) == 0 )
return bool ( self . credential and not len ( needed ) )
SINGLE_STMT
[["Insert", ["argument_list", 3, 20, 3, 38], ["boolean_operator", "N0"], 1], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["not_operator", "N2"], 2], ["Insert", "N1", ["identifier:self", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:credential", "T"], 2], ["Insert", "N2", ["not:not", "T"], 0], ["Move", "N2", ["call", 3, 21, 3, 32], 1], ["Delete", ["==:==", 3, 33, 3, 35]], ["Delete", ["integer:0", 3, 36, 3, 37]], ["Delete", ["comparison_operator", 3, 21, 3, 37]]]
gconsidine/awx@c20300f6ce1c4d0946376366b436b46a543d45df
Fix for failing job template callback test.
[ { "sha": "530bb503153a5b97eadb6698f25faf6b5b5f1911", "filename": "awx/main/models/__init__.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/c20300f6ce1c4d0946376366b436b46a543d45df/awx%2Fmain%2Fmodels%2F__init__.py", "raw_url": "https://github.com/gconsidine/awx/raw/c20300f6ce1c4d0946376366b436b46a543d45df/awx%2Fmain%2Fmodels%2F__init__.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2F__init__.py?ref=c20300f6ce1c4d0946376366b436b46a543d45df", "patch": "@@ -1608,7 +1608,7 @@ def can_start_without_user_input(self):\n for pw in inventory_source.source_passwords_needed:\n if pw not in needed:\n needed.append(pw)\n- return bool(len(needed) == 0)\n+ return bool(self.credential and not len(needed))\n \n class Job(CommonModelNameNotUnique):\n '''" } ]
awx
a32fc02323cda32021954036994c62cb0d680cf4
16d6ec252f11569567d203d5ccbcf4c57d18a67b
awx/main/models/__init__.py
https://github.com/gconsidine/awx
true
false
true
@@ -109,7 +109,7 @@ class VarsDictProperty(object): pass if d is None and self.key_value: d = {} - for kv in [x.decode('utf-8') for x in shlex.split(extra_vars, posix=True)]: + for kv in [x.decode('utf-8') for x in shlex.split(v, posix=True)]: if '=' in kv: k, v = kv.split('=', 1) d[k] = v
for kv in [ x . decode ( 'utf-8' ) for x in shlex . split ( extra_vars , posix = True ) ] : if '=' in kv : k , v = kv . split ( '=' , 1 ) d [ k ] = v
for kv in [ x . decode ( 'utf-8' ) for x in shlex . split ( v , posix = True ) ] : if '=' in kv : k , v = kv . split ( '=' , 1 ) d [ k ] = v
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:extra_vars", 3, 63, 3, 73], "v"]]
gconsidine/awx@a32fc02323cda32021954036994c62cb0d680cf4
Fix NameError in case where job extra vars are specified as key=value.
[ { "sha": "04c9c022de62ad2d7a09b15279a12524fe203234", "filename": "awx/main/models/__init__.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/a32fc02323cda32021954036994c62cb0d680cf4/awx%2Fmain%2Fmodels%2F__init__.py", "raw_url": "https://github.com/gconsidine/awx/raw/a32fc02323cda32021954036994c62cb0d680cf4/awx%2Fmain%2Fmodels%2F__init__.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2F__init__.py?ref=a32fc02323cda32021954036994c62cb0d680cf4", "patch": "@@ -109,7 +109,7 @@ def __get__(self, obj, type=None):\n pass\n if d is None and self.key_value:\n d = {}\n- for kv in [x.decode('utf-8') for x in shlex.split(extra_vars, posix=True)]:\n+ for kv in [x.decode('utf-8') for x in shlex.split(v, posix=True)]:\n if '=' in kv:\n k, v = kv.split('=', 1)\n d[k] = v" } ]
awx
413a799829538ae504a507a61fec721fb3a41679
72aacfd972c2f520f4e06f52aba580d7d737b561
awx/main/models/__init__.py
https://github.com/gconsidine/awx
true
false
true
@@ -1116,7 +1116,7 @@ class Credential(CommonModelNameNotUnique): if self.cloud != cloud: self.cloud = cloud if 'cloud' not in update_fields: - update_field.append('cloud') + update_fields.append('cloud') super(Credential, self).save(*args, **kwargs) # After saving a new instance for the first time, set the password # fields and save again.
update_field . append ( 'cloud' )
update_fields . append ( 'cloud' )
SAME_FUNCTION_WRONG_CALLER
[["Update", ["identifier:update_field", 3, 17, 3, 29], "update_fields"]]
gconsidine/awx@413a799829538ae504a507a61fec721fb3a41679
AC-537 Fix typo.
[ { "sha": "b53cbd5c12c7a83edd810f69fc250b3f6a82e0b6", "filename": "awx/main/models/__init__.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/413a799829538ae504a507a61fec721fb3a41679/awx%2Fmain%2Fmodels%2F__init__.py", "raw_url": "https://github.com/gconsidine/awx/raw/413a799829538ae504a507a61fec721fb3a41679/awx%2Fmain%2Fmodels%2F__init__.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fmain%2Fmodels%2F__init__.py?ref=413a799829538ae504a507a61fec721fb3a41679", "patch": "@@ -1116,7 +1116,7 @@ def save(self, *args, **kwargs):\n if self.cloud != cloud:\n self.cloud = cloud\n if 'cloud' not in update_fields:\n- update_field.append('cloud')\n+ update_fields.append('cloud')\n super(Credential, self).save(*args, **kwargs)\n # After saving a new instance for the first time, set the password\n # fields and save again." } ]
awx
e2b657c72cb4a2d3c7ffa60556e0f8c56e746b60
d5d3495494d65a19fce15de969edde53eaab3320
awx/api/serializers.py
https://github.com/gconsidine/awx
true
false
true
@@ -1018,7 +1018,7 @@ class ActivityStreamSerializer(BaseSerializer): rel['object_1'] = reverse('api:' + obj1_resolution + '_detail', args=(obj.object1_id,)) if obj.operation in ('associate', 'disassociate'): obj2_resolution = camelcase_to_underscore(obj.object2_type.split(".")[-1]) - rel['object_2'] = reverse('api:' + obj2_resolution + '_detail', args(obj.object2_id,)) + rel['object_2'] = reverse('api:' + obj2_resolution + '_detail', args=(obj.object2_id,)) return rel def get_summary_fields(self, obj):
rel [ 'object_2' ] = reverse ( 'api:' + obj2_resolution + '_detail' , args ( obj . object2_id , ) )
rel [ 'object_2' ] = reverse ( 'api:' + obj2_resolution + '_detail' , args = ( obj . object2_id , ) )
SINGLE_STMT
[["Insert", ["argument_list", 3, 38, 3, 99], ["keyword_argument", "N0"], 3], ["Move", "N0", ["identifier:args", 3, 77, 3, 81], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["tuple", "N1"], 2], ["Move", "N1", ["(:(", 3, 81, 3, 82], 0], ["Move", "N1", ["attribute", 3, 82, 3, 96], 1], ["Move", "N1", [",:,", 3, 96, 3, 97], 2], ["Move", "N1", ["):)", 3, 97, 3, 98], 3], ["Delete", ["argument_list", 3, 81, 3, 98]], ["Delete", ["call", 3, 77, 3, 98]]]
gconsidine/awx@e2b657c72cb4a2d3c7ffa60556e0f8c56e746b60
Fix a bug passing args to object 2 of the activitystream serializer
[ { "sha": "bb2db243131b9a25c8023532694fb51c1567ff78", "filename": "awx/api/serializers.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/e2b657c72cb4a2d3c7ffa60556e0f8c56e746b60/awx%2Fapi%2Fserializers.py", "raw_url": "https://github.com/gconsidine/awx/raw/e2b657c72cb4a2d3c7ffa60556e0f8c56e746b60/awx%2Fapi%2Fserializers.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/awx%2Fapi%2Fserializers.py?ref=e2b657c72cb4a2d3c7ffa60556e0f8c56e746b60", "patch": "@@ -1018,7 +1018,7 @@ def get_related(self, obj):\n rel['object_1'] = reverse('api:' + obj1_resolution + '_detail', args=(obj.object1_id,))\n if obj.operation in ('associate', 'disassociate'):\n obj2_resolution = camelcase_to_underscore(obj.object2_type.split(\".\")[-1])\n- rel['object_2'] = reverse('api:' + obj2_resolution + '_detail', args(obj.object2_id,))\n+ rel['object_2'] = reverse('api:' + obj2_resolution + '_detail', args=(obj.object2_id,))\n return rel\n \n def get_summary_fields(self, obj):" } ]
neo-python
35990e0ded0868fde57aafa2d74913aef4f3ba0e
25b0fac441f33fc9546f91bc4ccaf9fa2d0a23da
neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py
https://github.com/Ranchosm335/neo-python
true
false
true
@@ -150,7 +150,7 @@ class LevelDBBlockchain(Blockchain): if self._stored_header_count == 0: headers = [] for key, value in self._db.iterator(prefix=DBPrefix.DATA_Block): - dbhash = bytearray(value)[4:] + dbhash = bytearray(value)[8:] headers.append( Header.FromTrimmedData(binascii.unhexlify(dbhash), 0)) headers.sort(key=lambda h: h.Index)
dbhash = bytearray ( value ) [ 4 : ]
dbhash = bytearray ( value ) [ 8 : ]
CHANGE_NUMERIC_LITERAL
[["Update", ["integer:4", 3, 47, 3, 48], "8"]]
Ranchosm335/neo-python@35990e0ded0868fde57aafa2d74913aef4f3ba0e
fix for non-hexadecimal digit found error in leveldb
[ { "sha": "2d051bb05e6865741aea8ee879d2ea3c5463312c", "filename": "neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/35990e0ded0868fde57aafa2d74913aef4f3ba0e/neo%2FImplementations%2FBlockchains%2FLevelDB%2FLevelDBBlockchain.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/35990e0ded0868fde57aafa2d74913aef4f3ba0e/neo%2FImplementations%2FBlockchains%2FLevelDB%2FLevelDBBlockchain.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FImplementations%2FBlockchains%2FLevelDB%2FLevelDBBlockchain.py?ref=35990e0ded0868fde57aafa2d74913aef4f3ba0e", "patch": "@@ -150,7 +150,7 @@ def __init__(self, path):\n if self._stored_header_count == 0:\n headers = []\n for key, value in self._db.iterator(prefix=DBPrefix.DATA_Block):\n- dbhash = bytearray(value)[4:]\n+ dbhash = bytearray(value)[8:]\n headers.append( Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))\n \n headers.sort(key=lambda h: h.Index)" } ]
neo-python
de6c7e038f237571b3acadaf6180bb0b286d75b8
d45aed147e353358f4babf47c41f1326a1418fd1
neo/VM/ScriptBuilder.py
https://github.com/Ranchosm335/neo-python
true
false
true
@@ -106,7 +106,7 @@ class ScriptBuilder(object): elif data > 0 and data <= 16: return self.add(int.from_bytes(PUSH1, 'little') - 1 + data) else: - return self.push(binascii.hexlify(base256_encode(data))) + return self.push( binascii.hexlify( data.ToByteArray())) else: if not type(data) == bytearray: buf = binascii.unhexlify(data)
else : return self . push ( binascii . hexlify ( base256_encode ( data ) ) )
else : return self . push ( binascii . hexlify ( data . ToByteArray ( ) ) )
SINGLE_STMT
[["Insert", ["call", 3, 51, 3, 71], ["attribute", "N0"], 0], ["Move", "N0", ["identifier:data", 3, 66, 3, 70], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:ToByteArray", "T"], 2], ["Delete", ["identifier:base256_encode", 3, 51, 3, 65]]]
Ranchosm335/neo-python@de6c7e038f237571b3acadaf6180bb0b286d75b8
fixing issue with pushing big integers as invoke params
[ { "sha": "adfa6fd5d367103d8b19ea219cd0b043d52c613c", "filename": "neo/VM/ScriptBuilder.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/de6c7e038f237571b3acadaf6180bb0b286d75b8/neo%2FVM%2FScriptBuilder.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/de6c7e038f237571b3acadaf6180bb0b286d75b8/neo%2FVM%2FScriptBuilder.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FVM%2FScriptBuilder.py?ref=de6c7e038f237571b3acadaf6180bb0b286d75b8", "patch": "@@ -106,7 +106,7 @@ def push(self, data):\n elif data > 0 and data <= 16:\n return self.add(int.from_bytes(PUSH1, 'little') - 1 + data)\n else:\n- return self.push(binascii.hexlify(base256_encode(data)))\n+ return self.push( binascii.hexlify( data.ToByteArray()))\n else:\n if not type(data) == bytearray:\n buf = binascii.unhexlify(data)" } ]
neo-python
2d5023732ef219f6ecd30b50e4f1d2e5f22b5db8
9b7c2577eb43d8cdf528a0da69ef7c152f02e3f9
neo/Network/NeoNode.py
https://github.com/Ranchosm335/neo-python
true
false
true
@@ -164,7 +164,7 @@ class NeoNode(Protocol): # Finally, after a message has been fully deserialized and propagated, # check if another message can be extracted with the current buffer: - if len(self.buffer_in) > 24: + if len(self.buffer_in) >= 24: self.CheckDataReceived() def MessageReceived(self, m):
if len ( self . buffer_in ) > 24 : self . CheckDataReceived ( )
if len ( self . buffer_in ) >= 24 : self . CheckDataReceived ( )
CHANGE_BINARY_OPERATOR
[["Insert", ["comparison_operator", 3, 12, 3, 36], [">=:>=", "T"], 1], ["Delete", [">:>", 3, 32, 3, 33]]]
Ranchosm335/neo-python@2d5023732ef219f6ecd30b50e4f1d2e5f22b5db8
fix: allow to check another message in buffer if >=24, not >24
[ { "sha": "21b864bae894f1b87a05f101847a70773e3f6a2a", "filename": "neo/Network/NeoNode.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/2d5023732ef219f6ecd30b50e4f1d2e5f22b5db8/neo%2FNetwork%2FNeoNode.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/2d5023732ef219f6ecd30b50e4f1d2e5f22b5db8/neo%2FNetwork%2FNeoNode.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FNetwork%2FNeoNode.py?ref=2d5023732ef219f6ecd30b50e4f1d2e5f22b5db8", "patch": "@@ -164,7 +164,7 @@ def CheckDataReceived(self):\n \n # Finally, after a message has been fully deserialized and propagated,\n # check if another message can be extracted with the current buffer:\n- if len(self.buffer_in) > 24:\n+ if len(self.buffer_in) >= 24:\n self.CheckDataReceived()\n \n def MessageReceived(self, m):" } ]
neo-python
69285bbb084cdb14bb8ca50ac47f671e1dd487bf
f9d8cf2cfdca50983731092b0471ced91d26cb29
neo/contrib/smartcontract.py
https://github.com/Ranchosm335/neo-python
true
false
true
@@ -58,7 +58,7 @@ class SmartContract: return # call event handlers - handlers = set(self.event_handlers["*"] + self.event_handlers[sc_event.event_type]) # set(..) removes duplicates + handlers = set(self.event_handlers["*"] + self.event_handlers[sc_event.event_type.rpartition('.')[0] + ".*"] + self.event_handlers[sc_event.event_type]) # set(..) removes duplicates [event_handler(sc_event) for event_handler in handlers] def on_any(self, func):
handlers = set ( self . event_handlers [ "*" ] + self . event_handlers [ sc_event . event_type ] )
handlers = set ( self . event_handlers [ "*" ] + self . event_handlers [ sc_event . event_type . rpartition ( '.' ) [ 0 ] + ".*" ] + self . event_handlers [ sc_event . event_type ] )
CHANGE_BINARY_OPERAND
[["Insert", ["binary_operator", 3, 28, 3, 95], ["binary_operator", "N0"], 0], ["Insert", ["binary_operator", 3, 28, 3, 95], ["+:+", "T"], 1], ["Move", "N0", ["subscript", 3, 28, 3, 52], 0], ["Move", "N0", ["+:+", 3, 53, 3, 54], 1], ["Insert", "N0", ["subscript", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["[:[", "T"], 1], ["Insert", "N1", ["binary_operator", "N3"], 2], ["Insert", "N1", ["]:]", "T"], 3], ["Insert", "N2", ["identifier:self", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:event_handlers", "T"], 2], ["Insert", "N3", ["subscript", "N4"], 0], ["Insert", "N3", ["+:+", "T"], 1], ["Insert", "N3", ["string:\".*\"", "T"], 2], ["Insert", "N4", ["call", "N5"], 0], ["Insert", "N4", ["[:[", "T"], 1], ["Insert", "N4", ["integer:0", "T"], 2], ["Insert", "N4", ["]:]", "T"], 3], ["Insert", "N5", ["attribute", "N6"], 0], ["Insert", "N5", ["argument_list", "N7"], 1], ["Insert", "N6", ["attribute", "N8"], 0], ["Insert", "N6", [".:.", "T"], 1], ["Insert", "N6", ["identifier:rpartition", "T"], 2], ["Insert", "N7", ["(:(", "T"], 0], ["Insert", "N7", ["string:'.'", "T"], 1], ["Insert", "N7", ["):)", "T"], 2], ["Insert", "N8", ["identifier:sc_event", "T"], 0], ["Insert", "N8", [".:.", "T"], 1], ["Insert", "N8", ["identifier:event_type", "T"], 2]]
Ranchosm335/neo-python@69285bbb084cdb14bb8ca50ac47f671e1dd487bf
Fixed wildcard SmartContract events (#228) * Added support for wildcard SmartContract events such as SmartContract.Execution.*, SmartContract.Verification.*, SmartContract.Storage.*, and SmartContract.Contract.*. The previous implementation was only handling generic '*' wildcard events that match all events. The decorators @on_storage and @on_execution were not working previously.
[ { "sha": "29e0ded33747c155405849383f8ab766e80882f6", "filename": "neo/contrib/smartcontract.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/69285bbb084cdb14bb8ca50ac47f671e1dd487bf/neo%2Fcontrib%2Fsmartcontract.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/69285bbb084cdb14bb8ca50ac47f671e1dd487bf/neo%2Fcontrib%2Fsmartcontract.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2Fcontrib%2Fsmartcontract.py?ref=69285bbb084cdb14bb8ca50ac47f671e1dd487bf", "patch": "@@ -58,7 +58,7 @@ def call_on_event(sc_event):\n return\n \n # call event handlers\n- handlers = set(self.event_handlers[\"*\"] + self.event_handlers[sc_event.event_type]) # set(..) removes duplicates\n+ handlers = set(self.event_handlers[\"*\"] + self.event_handlers[sc_event.event_type.rpartition('.')[0] + \".*\"] + self.event_handlers[sc_event.event_type]) # set(..) removes duplicates\n [event_handler(sc_event) for event_handler in handlers]\n \n def on_any(self, func):" } ]
neo-python
149bed73cc65a001954dfc120d7ddc8a601c71bd
d47263bf614e8e237c626923862b19374869c6f9
prompt.py
https://github.com/Ranchosm335/neo-python
true
false
false
@@ -107,7 +107,7 @@ class PromptInterface(object): 'withdraw holds # lists all current holds', 'withdraw completed # lists completed holds eligible for cleanup', 'withdraw cancel # cancels current holds', - 'witdraw cleanup # cleans up completed holds', + 'withdraw cleanup # cleans up completed holds', 'withdraw # withdraws the first hold availabe', 'withdraw all # withdraw all holds available', 'send {assetId or name} {address} {amount} (--from-addr={addr})',
'witdraw cleanup # cleans up completed holds' ,
'withdraw cleanup # cleans up completed holds' ,
CHANGE_STRING_LITERAL
[["Update", ["string:'witdraw cleanup # cleans up completed holds'", 3, 17, 3, 62], "'withdraw cleanup # cleans up completed holds'"]]
Ranchosm335/neo-python@149bed73cc65a001954dfc120d7ddc8a601c71bd
fix typo in prompt.py (witdraw -> withdraw) (#257)
[ { "sha": "c331a89244cf5a624cad4f89666c4c00b2e4f621", "filename": "prompt.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/149bed73cc65a001954dfc120d7ddc8a601c71bd/prompt.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/149bed73cc65a001954dfc120d7ddc8a601c71bd/prompt.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/prompt.py?ref=149bed73cc65a001954dfc120d7ddc8a601c71bd", "patch": "@@ -107,7 +107,7 @@ class PromptInterface(object):\n 'withdraw holds # lists all current holds',\n 'withdraw completed # lists completed holds eligible for cleanup',\n 'withdraw cancel # cancels current holds',\n- 'witdraw cleanup # cleans up completed holds',\n+ 'withdraw cleanup # cleans up completed holds',\n 'withdraw # withdraws the first hold availabe',\n 'withdraw all # withdraw all holds available',\n 'send {assetId or name} {address} {amount} (--from-addr={addr})'," } ]
neo-python
384c63d714062195b803dc9fec35fba34a61134f
272d8a181f604e622dd1c193e850cffbc9b6e7ba
neo/Wallets/Wallet.py
https://github.com/Ranchosm335/neo-python
true
false
false
@@ -1057,7 +1057,7 @@ class Wallet(object): for key, unspents in paycoins.items(): if unspents is None: logger.error("insufficient funds for asset id: %s. \ - Is your wallet synced? Please enter 'wallet' and check \ + Is your wallet fully synced? Please enter 'wallet' and check \ 'percent_synced', it should be 100" % key) return None
logger . error ( "insufficient funds for asset id: %s. \ Is your wallet synced? Please enter 'wallet' and check \ 'percent_synced', it should be 100" % key )
logger . error ( "insufficient funds for asset id: %s. \ Is your wallet fully synced? Please enter 'wallet' and check \ 'percent_synced', it should be 100" % key )
CHANGE_BINARY_OPERAND
[["Update", ["string:\"insufficient funds for asset id: %s. \\\n Is your wallet synced? Please enter 'wallet' and check \\\n 'percent_synced', it should be 100\"", 2, 30, 4, 52], "\"insufficient funds for asset id: %s. \\\n Is your wallet fully synced? Please enter 'wallet' and check \\\n 'percent_synced', it should be 100\""]]
Ranchosm335/neo-python@384c63d714062195b803dc9fec35fba34a61134f
Added wallet sync hint to insufficient funds error
[ { "sha": "9a2943615b0bc0f317dc803c55a1d95a8eb0e0a6", "filename": "neo/Wallets/Wallet.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/384c63d714062195b803dc9fec35fba34a61134f/neo%2FWallets%2FWallet.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/384c63d714062195b803dc9fec35fba34a61134f/neo%2FWallets%2FWallet.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FWallets%2FWallet.py?ref=384c63d714062195b803dc9fec35fba34a61134f", "patch": "@@ -1057,7 +1057,7 @@ def MakeTransaction(self,\n for key, unspents in paycoins.items():\n if unspents is None:\n logger.error(\"insufficient funds for asset id: %s. \\\n- Is your wallet synced? Please enter 'wallet' and check \\\n+ Is your wallet fully synced? Please enter 'wallet' and check \\\n 'percent_synced', it should be 100\" % key)\n return None\n " } ]
neo-python
c003435ea116c9b8a6ffd300445a344b9dd963d3
533642a4510b18f47d3e373f30e85784e6364116
neo/Prompt/Commands/Invoke.py
https://github.com/Ranchosm335/neo-python
true
false
true
@@ -294,7 +294,7 @@ def test_invoke(script, wallet, outputs, withdrawal_tx=None, from_addr=None): net_fee = None tx_gas = None - if consumed < Fixed8.Zero(): + if consumed <= Fixed8.Zero(): net_fee = Fixed8.FromDecimal(.001) tx_gas = Fixed8.Zero() else:
if consumed < Fixed8 . Zero ( ) : net_fee = Fixed8 . FromDecimal ( .001 ) tx_gas = Fixed8 . Zero ( ) else :
if consumed <= Fixed8 . Zero ( ) : net_fee = Fixed8 . FromDecimal ( .001 ) tx_gas = Fixed8 . Zero ( ) else :
CHANGE_BINARY_OPERATOR
[["Insert", ["comparison_operator", 3, 16, 3, 40], ["<=:<=", "T"], 1], ["Delete", ["<:<", 3, 25, 3, 26]]]
Ranchosm335/neo-python@c003435ea116c9b8a6ffd300445a344b9dd963d3
Fix Bug With Edge Case Gas Calc * If a transaction consumes > 9 GAS and <= 10 GAS, it will not go through because no GAS fee is attached to the transaction. It gets relayed, but never gets processed due to the lack of a fee. The root of this issue comes down to a simple operator issue when determining the GAS fee that is inconsistent with the NEO core reference implementation. The NEO core reference implementation handles this by attached the default "net_fee" of 0.001 when the GAS consumed is <= 10 GAS: https://github.com/neo-project/neo-gui/blob/master/neo-gui/UI/InvokeContractDialog.cs#L153 The neo-python implementation isn't quite handling this properly due to the fact that it is only checking for GAS consumed being less than zero. In the scenario where the GAS consumed is > 9 and <= 10, the consumed value is -0.xx. The Ceil() converts this to 0, but then the consumed check doesn't properly account for this case. A simple operator change from < to <= solves it.
[ { "sha": "635e1850e9244aa71570b991ace8a55b6d9df0db", "filename": "neo/Prompt/Commands/Invoke.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/c003435ea116c9b8a6ffd300445a344b9dd963d3/neo%2FPrompt%2FCommands%2FInvoke.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/c003435ea116c9b8a6ffd300445a344b9dd963d3/neo%2FPrompt%2FCommands%2FInvoke.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FPrompt%2FCommands%2FInvoke.py?ref=c003435ea116c9b8a6ffd300445a344b9dd963d3", "patch": "@@ -294,7 +294,7 @@ def test_invoke(script, wallet, outputs, withdrawal_tx=None, from_addr=None):\n net_fee = None\n tx_gas = None\n \n- if consumed < Fixed8.Zero():\n+ if consumed <= Fixed8.Zero():\n net_fee = Fixed8.FromDecimal(.001)\n tx_gas = Fixed8.Zero()\n else:" } ]
neo-python
74c9972539618de5eb3d2a19ca5178b227c4ad59
ec232819722a167715bcd270c5c4cf0e531ac66e
neo/Prompt/Commands/Invoke.py
https://github.com/Ranchosm335/neo-python
true
false
true
@@ -479,7 +479,7 @@ def test_deploy_and_invoke(deploy_script, invoke_args, wallet): consumed = engine.GasConsumed() - Fixed8.FromDecimal(10) consumed = consumed.Ceil() - if consumed < Fixed8.Zero(): + if consumed <= Fixed8.Zero(): consumed = Fixed8.FromDecimal(.001) total_ops = engine.ops_processed
if consumed < Fixed8 . Zero ( ) : consumed = Fixed8 . FromDecimal ( .001 )
if consumed <= Fixed8 . Zero ( ) : consumed = Fixed8 . FromDecimal ( .001 )
CHANGE_BINARY_OPERATOR
[["Insert", ["comparison_operator", 3, 16, 3, 40], ["<=:<=", "T"], 1], ["Delete", ["<:<", 3, 25, 3, 26]]]
Ranchosm335/neo-python@74c9972539618de5eb3d2a19ca5178b227c4ad59
Fix test_deploy_and_invoke Edge Case * Applying the same GAS edge case fix for test_deploy_and_invoke where GAS consumed > 9 and <= 10.
[ { "sha": "5efcd2abe873156319237fcb3982da2956451a5c", "filename": "neo/Prompt/Commands/Invoke.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/74c9972539618de5eb3d2a19ca5178b227c4ad59/neo%2FPrompt%2FCommands%2FInvoke.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/74c9972539618de5eb3d2a19ca5178b227c4ad59/neo%2FPrompt%2FCommands%2FInvoke.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FPrompt%2FCommands%2FInvoke.py?ref=74c9972539618de5eb3d2a19ca5178b227c4ad59", "patch": "@@ -479,7 +479,7 @@ def test_deploy_and_invoke(deploy_script, invoke_args, wallet):\n consumed = engine.GasConsumed() - Fixed8.FromDecimal(10)\n consumed = consumed.Ceil()\n \n- if consumed < Fixed8.Zero():\n+ if consumed <= Fixed8.Zero():\n consumed = Fixed8.FromDecimal(.001)\n \n total_ops = engine.ops_processed" } ]
neo-python
f128129780110e71ab945e9b9ecb577bd259d420
42dd776d22f9c9a9effde3b095bc56ffbf07b098
neo/Prompt/Commands/Invoke.py
https://github.com/Ranchosm335/neo-python
true
false
false
@@ -35,7 +35,7 @@ from neo.EventHub import events from neo.VM.OpCode import * import json -DEFAULT_MIN_FEE = Fixed8.FromDecimal(.001) +DEFAULT_MIN_FEE = Fixed8.FromDecimal(.0001) def InvokeContract(wallet, tx, fee=Fixed8.Zero()):
DEFAULT_MIN_FEE = Fixed8 . FromDecimal ( .001 )
DEFAULT_MIN_FEE = Fixed8 . FromDecimal ( .0001 )
CHANGE_NUMERIC_LITERAL
[["Update", ["float:.001", 3, 38, 3, 42], ".0001"]]
Ranchosm335/neo-python@f128129780110e71ab945e9b9ecb577bd259d420
Changed default min fee to a bit less since GAS prices have gone up a lot since this was originally written :)
[ { "sha": "f8345dc87e4d616674a8f3c95540b81d183d97d3", "filename": "neo/Prompt/Commands/Invoke.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/Ranchosm335/neo-python/blob/f128129780110e71ab945e9b9ecb577bd259d420/neo%2FPrompt%2FCommands%2FInvoke.py", "raw_url": "https://github.com/Ranchosm335/neo-python/raw/f128129780110e71ab945e9b9ecb577bd259d420/neo%2FPrompt%2FCommands%2FInvoke.py", "contents_url": "https://api.github.com/repos/Ranchosm335/neo-python/contents/neo%2FPrompt%2FCommands%2FInvoke.py?ref=f128129780110e71ab945e9b9ecb577bd259d420", "patch": "@@ -35,7 +35,7 @@\n from neo.VM.OpCode import *\n import json\n \n-DEFAULT_MIN_FEE = Fixed8.FromDecimal(.001)\n+DEFAULT_MIN_FEE = Fixed8.FromDecimal(.0001)\n \n \n def InvokeContract(wallet, tx, fee=Fixed8.Zero()):" } ]
awx
bd3e4ba943b393dafb894d3134d687ee1e02a8a6
5a03fdf84147a611407d4b6f290508e4a2e0a885
lib/main/rbac.py
https://github.com/gconsidine/awx
true
false
true
@@ -11,7 +11,7 @@ class CustomRbac(permissions.BasePermission): def _common_user_check(self, request): # no anonymous users - if type(request.user) == AnonymousUser: + if request.user.is_anonymous(): # 401, not 403, hence no raised exception return False # superusers are always good
if type ( request . user ) == AnonymousUser : return False
if request . user . is_anonymous ( ) : return False
SINGLE_STMT
[["Insert", ["if_statement", 3, 9, 5, 25], ["call", "N0"], 1], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["attribute", 3, 17, 3, 29], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:is_anonymous", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["):)", "T"], 1], ["Delete", ["identifier:type", 3, 12, 3, 16]], ["Delete", ["(:(", 3, 16, 3, 17]], ["Delete", ["):)", 3, 29, 3, 30]], ["Delete", ["argument_list", 3, 16, 3, 30]], ["Delete", ["call", 3, 12, 3, 30]], ["Delete", ["==:==", 3, 31, 3, 33]], ["Delete", ["identifier:AnonymousUser", 3, 34, 3, 47]], ["Delete", ["comparison_operator", 3, 12, 3, 47]]]
gconsidine/awx@bd3e4ba943b393dafb894d3134d687ee1e02a8a6
Use request.user.is_anonymous versus doing type check.
[ { "sha": "2c44badb010517da37076602bac712d6ae3b271f", "filename": "lib/main/rbac.py", "status": "modified", "additions": 1, "deletions": 1, "changes": 2, "blob_url": "https://github.com/gconsidine/awx/blob/bd3e4ba943b393dafb894d3134d687ee1e02a8a6/lib%2Fmain%2Frbac.py", "raw_url": "https://github.com/gconsidine/awx/raw/bd3e4ba943b393dafb894d3134d687ee1e02a8a6/lib%2Fmain%2Frbac.py", "contents_url": "https://api.github.com/repos/gconsidine/awx/contents/lib%2Fmain%2Frbac.py?ref=bd3e4ba943b393dafb894d3134d687ee1e02a8a6", "patch": "@@ -11,7 +11,7 @@ class CustomRbac(permissions.BasePermission):\n \n def _common_user_check(self, request):\n # no anonymous users\n- if type(request.user) == AnonymousUser:\n+ if request.user.is_anonymous():\n # 401, not 403, hence no raised exception\n return False\n # superusers are always good" } ]
matplotlib
6f06a5e71d25475e25e016ce190d5c13e838e73f
60fc37e0ad6c63460af17e63ca3deff6cf9f0b3e
lib/matplotlib/rcsetup.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -14,7 +14,7 @@ from matplotlib.colors import is_color_like interactive_bk = ['gtk', 'gtkagg', 'gtkcairo', 'fltkagg', 'qtagg', 'qt4agg', 'tkagg', 'wx', 'wxagg', 'cocoaagg'] -non_interactive_bk = ['agg', 'cairo', 'emv', 'gdk', +non_interactive_bk = ['agg', 'cairo', 'emf', 'gdk', 'pdf', 'ps', 'svg', 'template'] all_backends = interactive_bk + non_interactive_bk
non_interactive_bk = [ 'agg' , 'cairo' , 'emv' , 'gdk' , 'pdf' , 'ps' , 'svg' , 'template' ]
non_interactive_bk = [ 'agg' , 'cairo' , 'emf' , 'gdk' , 'pdf' , 'ps' , 'svg' , 'template' ]
CHANGE_STRING_LITERAL
[["Update", ["string:'emv'", 3, 39, 3, 44], "'emf'"]]
dsquareindia/matplotlib@6f06a5e71d25475e25e016ce190d5c13e838e73f
null
null
matplotlib
f48ef81235cb59570caa029dc5f08c8d7eb7fea2
6f06a5e71d25475e25e016ce190d5c13e838e73f
lib/matplotlib/image.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -170,7 +170,7 @@ class AxesImage(martist.Artist, cm.ScalarMappable): if self._imcache is None: if self._A.dtype == np.uint8 and len(self._A.shape) == 3: - im = _image.frombyte(self._A[xslice,yslice,:], 0) + im = _image.frombyte(self._A[yslice,xslice,:], 0) im.is_grayscale = False else: if self._rgbacache is None:
im = _image . frombyte ( self . _A [ xslice , yslice , : ] , 0 )
im = _image . frombyte ( self . _A [ yslice , xslice , : ] , 0 )
SINGLE_STMT
[["Update", ["identifier:xslice", 3, 46, 3, 52], "yslice"], ["Update", ["identifier:yslice", 3, 53, 3, 59], "xslice"]]
dsquareindia/matplotlib@f48ef81235cb59570caa029dc5f08c8d7eb7fea2
null
null
matplotlib
e024ab76f14112bdf96d852ee818e391f84f4bd5
a3443376238f59f4df9e7ec2a173ed5ba124e44f
lib/matplotlib/text.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -394,7 +394,7 @@ class Text(Artist): return (x, y, self._text, self._color, self._verticalalignment, self._horizontalalignment, hash(self._fontproperties), self._rotation, - self._renderer.dpi + self._renderer.dpi, id(self._renderer) ) def get_text(self):
return ( x , y , self . _text , self . _color , self . _verticalalignment , self . _horizontalalignment , hash ( self . _fontproperties ) , self . _rotation , self . _renderer . dpi )
return ( x , y , self . _text , self . _color , self . _verticalalignment , self . _horizontalalignment , hash ( self . _fontproperties ) , self . _rotation , self . _renderer . dpi , id ( self . _renderer ) )
ADD_ELEMENTS_TO_ITERABLE
[["Insert", ["tuple", 0, 16, 4, 18], [",:,", "T"], 18], ["Insert", ["tuple", 0, 16, 4, 18], ["call", "N0"], 19], ["Insert", ["tuple", 0, 16, 4, 18], ["):)", "T"], 20], ["Insert", "N0", ["identifier:id", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["attribute", "N2"], 1], ["Move", "N1", ["):)", 4, 17, 4, 18], 2], ["Insert", "N2", ["identifier:self", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:_renderer", "T"], 2]]
dsquareindia/matplotlib@e024ab76f14112bdf96d852ee818e391f84f4bd5
null
null
matplotlib
a61ff31ecfe8d33424ff75f4b0168872acbd9f1b
75facdaa43e12e3e4e084716182c6814251e8417
doc/sphinxext/ipython_console_highlighting.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -13,7 +13,7 @@ class IPythonConsoleLexer(Lexer): Tracebacks are not currently supported. - .. sourcecode:: pycon + .. sourcecode:: ipython In [1]: a = 'foo'
Tracebacks are not currently supported . . . sourcecode : : pycon
Tracebacks are not currently supported . . . sourcecode : : ipython
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:pycon", 3, 21, 3, 26], "ipython"]]
dsquareindia/matplotlib@a61ff31ecfe8d33424ff75f4b0168872acbd9f1b
null
null
matplotlib
6f2d6082950d9e4259fb9bde23151cd3cde50940
6f0fa53b2d1bdc11fc362c4e8e918dfb70e1d2f0
lib/matplotlib/transforms.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -808,7 +808,7 @@ class Bbox(BboxBase): return points, minpos, changed = update_path_extents( - Path(xy), None, self._points, self._minpos, ignore) + Path(xym), None, self._points, self._minpos, ignore) if changed: self._points = points
points , minpos , changed = update_path_extents ( Path ( xy ) , None , self . _points , self . _minpos , ignore )
points , minpos , changed = update_path_extents ( Path ( xym ) , None , self . _points , self . _minpos , ignore )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:xy", 3, 18, 3, 20], "xym"]]
dsquareindia/matplotlib@6f2d6082950d9e4259fb9bde23151cd3cde50940
null
null
matplotlib
c301cf1563dfd4a10b0e5a67fcd6f0ab77030dcb
414a9d785a615542c0a3f43f9325a9db6ac3c933
examples/pylab_examples/mri_with_eeg.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -24,7 +24,7 @@ if 1: # plot the MRI in pcolor if 1: # plot the histogram of MRI intensity subplot(222) im = ravel(im) - im = take(im, nonzero(im)) # ignore the background + im = ravel(take(im, nonzero(im))) # ignore the background im = im/(2.0**15) # normalize hist(im, 100) xticks([-1, -.5, 0, .5, 1])
im = take ( im , nonzero ( im ) )
im = ravel ( take ( im , nonzero ( im ) ) )
ADD_FUNCTION_AROUND_EXPRESSION
[["Insert", ["call", 3, 10, 3, 31], ["identifier:ravel", "T"], 0], ["Insert", ["call", 3, 10, 3, 31], ["argument_list", "N0"], 1], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["call", 3, 10, 3, 31], 1], ["Insert", "N0", ["):)", "T"], 2]]
dsquareindia/matplotlib@c301cf1563dfd4a10b0e5a67fcd6f0ab77030dcb
null
null
matplotlib
d90b08a3b7c63ead65d32ae3cbfefaf81141416c
b3332369d54a930aa129ded0ec65984b92c79ea8
lib/matplotlib/collections.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -284,7 +284,7 @@ class Collection(artist.Artist, cm.ScalarMappable): dashes.append(dashd[cbook.ls_mapper[x]]) else: raise ValueError() - elif cbook.iterator(x) and len(x) == 2: + elif cbook.iterable(x) and len(x) == 2: dashes.append(x) else: raise ValueError()
elif cbook . iterator ( x ) and len ( x ) == 2 : dashes . append ( x )
elif cbook . iterable ( x ) and len ( x ) == 2 : dashes . append ( x )
WRONG_FUNCTION_NAME
[["Update", ["identifier:iterator", 3, 36, 3, 44], "iterable"]]
dsquareindia/matplotlib@d90b08a3b7c63ead65d32ae3cbfefaf81141416c
null
null
matplotlib
c61e303b2888660ee9f4377e31a443043f03b2d1
5d6171a62978c62c720112df5a341c0fc6d7a3da
lib/matplotlib/backends/backend_pdf.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -1106,7 +1106,7 @@ end""" cmds.append(Op.lineto) elif code == Path.CURVE3: points = quad2cubic(*(list(last_points[-2:]) + list(points))) - cmds.extend(points) + cmds.extend(points[2:]) cmds.append(Op.curveto) elif code == Path.CURVE4: cmds.extend(points)
cmds . extend ( points )
cmds . extend ( points [ 2 : ] )
SINGLE_STMT
[["Insert", ["argument_list", 3, 28, 3, 36], ["subscript", "N0"], 1], ["Move", "N0", ["identifier:points", 3, 29, 3, 35], 0], ["Insert", "N0", ["[:[", "T"], 1], ["Insert", "N0", ["slice", "N1"], 2], ["Insert", "N0", ["]:]", "T"], 3], ["Insert", "N1", ["integer:2", "T"], 0], ["Insert", "N1", [":::", "T"], 1]]
dsquareindia/matplotlib@c61e303b2888660ee9f4377e31a443043f03b2d1
null
null
matplotlib
fbdf7e95ae64c24e5c7a5351b42ac4ca90a78c14
2ec7b677df8f2d7f604f88c10e0b35f1b541d1f7
lib/matplotlib/transforms.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -599,7 +599,7 @@ class BboxBase(TransformNode): dx1 = np.sign(vertices[:, 0] - x1) dy1 = np.sign(vertices[:, 1] - y1) inside = (abs(dx0 + dx1) + abs(dy0 + dy1)) <= 2 - return N.sum(inside) + return np.sum(inside) def count_overlaps(self, bboxes):
return N . sum ( inside )
return np . sum ( inside )
SAME_FUNCTION_WRONG_CALLER
[["Update", ["identifier:N", 3, 16, 3, 17], "np"]]
dsquareindia/matplotlib@fbdf7e95ae64c24e5c7a5351b42ac4ca90a78c14
null
null
matplotlib
a4953e5f8b4a9f33c506b4578b0346daa81af32a
937c7e2fcad0ed1dff5590297b1b0c4458a426d9
lib/matplotlib/mlab.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -2719,7 +2719,7 @@ def griddata(x,y,z,xi,yi,interp='nn'): # remove masked points. if hasattr(z,'mask'): # make sure mask is not a scalar boolean array. - if a.mask.ndim: + if z.mask.ndim: x = x.compress(z.mask == False) y = y.compress(z.mask == False) z = z.compressed()
if a . mask . ndim : x = x . compress ( z . mask == False ) y = y . compress ( z . mask == False ) z = z . compressed ( )
if z . mask . ndim : x = x . compress ( z . mask == False ) y = y . compress ( z . mask == False ) z = z . compressed ( )
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:a", 3, 12, 3, 13], "z"]]
dsquareindia/matplotlib@a4953e5f8b4a9f33c506b4578b0346daa81af32a
null
null
matplotlib
12d012434db58073583ca507b7f4de9c9078f1a1
b5f14d5d8c937092062a0d69ea98670ce80b871b
lib/matplotlib/widgets.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -1433,7 +1433,7 @@ class LassoSelector(AxesWidget): The selected path can be used in conjunction with - :function:`~matplotlib.path.Path.contains_point` to select + :func:`~matplotlib.path.Path.contains_point` to select data points from an image. In contrast to :class:`Lasso`, `LassoSelector` is written with an interface
with : function : `~matplotlib.path.Path.contains_point` to select
with : func : `~matplotlib.path.Path.contains_point` to select
CHANGE_IDENTIFIER_USED
[["Update", ["identifier:function", 2, 6, 2, 14], "func"]]
dsquareindia/matplotlib@12d012434db58073583ca507b7f4de9c9078f1a1
null
null
matplotlib
f38fcb392d1d247b933f00e65022892007fb8325
1935f1273aef443895a4da4e5f2c4aa86103a414
lib/matplotlib/animation.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -399,7 +399,7 @@ class FFMpegFileWriter(FileMovieWriter, FFMpegBase): def _args(self): # Returns the command line parameters for subprocess to use # ffmpeg to create a movie using a collection of temp images - return [self.bin_path(), '-i', self._base_temp_name()-vframes, + return [self.bin_path(), '-i', self._base_temp_name(), '-vframes', str(self._frame_counter), '-r', str(self.fps), ] + self.output_args
return [ self . bin_path ( ) , '-i' , self . _base_temp_name ( ) - vframes , '-vframes' , str ( self . _frame_counter ) , '-r' , str ( self . fps ) , ] + self . output_args
return [ self . bin_path ( ) , '-i' , self . _base_temp_name ( ) , '-vframes' , str ( self . _frame_counter ) , '-r' , str ( self . fps ) , ] + self . output_args
SINGLE_STMT
[["Move", ["list", 3, 16, 5, 39], ["call", 3, 40, 3, 62], 5], ["Delete", ["-:-", 3, 62, 3, 63]], ["Delete", ["identifier:vframes", 3, 63, 3, 70]], ["Delete", ["binary_operator", 3, 40, 3, 70]]]
dsquareindia/matplotlib@f38fcb392d1d247b933f00e65022892007fb8325
null
null
matplotlib
5c8f3d605ff045ddfbc3ca950aef85366617af5a
f38fcb392d1d247b933f00e65022892007fb8325
lib/matplotlib/animation.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -401,7 +401,7 @@ class FFMpegFileWriter(FileMovieWriter, FFMpegBase): # ffmpeg to create a movie using a collection of temp images return [self.bin_path(), '-i', self._base_temp_name(), '-vframes', str(self._frame_counter), - '-r', str(self.fps), ] + self.output_args + '-r', str(self.fps)] + self.output_args # Base class of avconv information. AVConv has identical arguments to
return [ self . bin_path ( ) , '-i' , self . _base_temp_name ( ) , '-vframes' , str ( self . _frame_counter ) , '-r' , str ( self . fps ) , ] + self . output_args
return [ self . bin_path ( ) , '-i' , self . _base_temp_name ( ) , '-vframes' , str ( self . _frame_counter ) , '-r' , str ( self . fps ) ] + self . output_args
CHANGE_BINARY_OPERAND
[["Delete", [",:,", 3, 36, 3, 37]]]
dsquareindia/matplotlib@5c8f3d605ff045ddfbc3ca950aef85366617af5a
null
null
matplotlib
ebd5b0988c18471e64acfc807dd435c4f2652d8b
983575a8d9aa6f99535b4c0b4504a9e4ea847d4f
lib/matplotlib/backends/qt4_editor/formsubplottool.py
https://github.com/dsquareindia/matplotlib
true
false
true
@@ -216,7 +216,7 @@ class UiSubplotTool(QtGui.QDialog): # button bar hbox2 = QtGui.QHBoxLayout() gbox.addLayout(hbox2, 8, 0, 1, 1) - self.tightlayout = QtGui.QPushButton('Thight Layout', self) + self.tightlayout = QtGui.QPushButton('Tight Layout', self) spacer = QtGui.QSpacerItem( 5, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.resetbutton = QtGui.QPushButton('Reset', self)
self . tightlayout = QtGui . QPushButton ( 'Thight Layout' , self )
self . tightlayout = QtGui . QPushButton ( 'Tight Layout' , self )
CHANGE_STRING_LITERAL
[["Update", ["string:'Thight Layout'", 3, 46, 3, 61], "'Tight Layout'"]]
dsquareindia/matplotlib@ebd5b0988c18471e64acfc807dd435c4f2652d8b
null
null
matplotlib
f234476cb739918a51e8f3def817c14f7b6c2348
71c77f085827b4b269bdffb916702d0f4dedbb85
lib/matplotlib/axes/_axes.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -950,7 +950,7 @@ class Axes(_AxesBase): x : scalar or 1D array_like x-indexes where to plot the lines. - xmin, xmax : scalar or 1D array_like + ymin, ymax : scalar or 1D array_like Respective beginning and end of each line. If scalars are provided, all lines will have same length.
x - indexes where to plot the lines . xmin , xmax : scalar or 1 D array_like
x - indexes where to plot the lines . ymin , ymax : scalar or 1 D array_like
SINGLE_STMT
[["Update", ["identifier:xmax", 3, 15, 3, 19], "ymax"], ["Update", ["identifier:xmin", 3, 9, 3, 13], "ymin"]]
dsquareindia/matplotlib@f234476cb739918a51e8f3def817c14f7b6c2348
null
null
matplotlib
f968f14e0157ff0ea8d39d0b6be4528d16563ea4
a78ed1474c59e97c58bb156da95a431ab6ed19a8
lib/matplotlib/axes.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -3754,7 +3754,7 @@ class Axes(martist.Artist): x : scalar or 1D array_like x-indexes where to plot the lines. - xmin, xmax : scalar or 1D array_like + ymin, ymax : scalar or 1D array_like Respective beginning and end of each line. If scalars are provided, all lines will have same length.
x - indexes where to plot the lines . xmin , xmax : scalar or 1 D array_like
x - indexes where to plot the lines . ymin , ymax : scalar or 1 D array_like
SINGLE_STMT
[["Update", ["identifier:xmax", 3, 15, 3, 19], "ymax"], ["Update", ["identifier:xmin", 3, 9, 3, 13], "ymin"]]
dsquareindia/matplotlib@f968f14e0157ff0ea8d39d0b6be4528d16563ea4
null
null
matplotlib
b7812ee001a43c309ac71e263a99f2700d8b8c8e
9be5ac4f6842b430a94afefbd96f253d09605f44
lib/matplotlib/sphinxext/plot_directive.py
https://github.com/dsquareindia/matplotlib
true
false
false
@@ -147,7 +147,7 @@ import sphinx sphinx_version = sphinx.__version__.split(".") # The split is necessary for sphinx beta versions where the string is # '6b1' -sphinx_version = tuple([int(re.split('[a-z]', x)[0]) +sphinx_version = tuple([int(re.split('[^0-9]', x)[0]) for x in sphinx_version[:2]]) try:
sphinx_version = tuple ( [ int ( re . split ( '[a-z]' , x ) [ 0 ] ) for x in sphinx_version [ : 2 ] ] )
sphinx_version = tuple ( [ int ( re . split ( '[^0-9]' , x ) [ 0 ] ) for x in sphinx_version [ : 2 ] ] )
CHANGE_STRING_LITERAL
[["Update", ["string:'[a-z]'", 3, 38, 3, 45], "'[^0-9]'"]]
dsquareindia/matplotlib@b7812ee001a43c309ac71e263a99f2700d8b8c8e
null
null