func_name: stringlengths (2–53)
func_src_before: stringlengths (63–114k)
func_src_after: stringlengths (86–114k)
line_changes: dict
char_changes: dict
commit_link: stringlengths (66–117)
file_name: stringlengths (5–72)
vul_type: stringclasses (9 values)
quiz
@app.route('/quiz') def quiz(): varga = request.args.get('varga') try: rows =[] with sql.connect('amara.db') as con: con.row_factory = sql.Row cur = con.cursor() cur.execute("select * from pada inner join mula on pada.sloka_line = mula.sloka_line where pada.varga = '%s' order by random() limit 1;" % varga) rows = cur.fetchall(); artha = rows[0]["artha"]; cur.execute("select pada from pada where varga = '%s' and artha = '%s' order by id" % (varga, artha)); paryaya = cur.fetchall(); return render_template('quiz.html', rows=rows, paryaya=paryaya, varga=varga) finally: con.close()
@app.route('/quiz') def quiz(): varga = request.args.get('varga') try: rows =[] with sql.connect('amara.db') as con: con.row_factory = sql.Row cur = con.cursor() cur.execute("select * from pada inner join mula on pada.sloka_line = mula.sloka_line where pada.varga = ? order by random() limit 1;", [varga]) rows = cur.fetchall(); artha = rows[0]["artha"]; cur.execute("select pada from pada where varga = ? and artha = ? order by id", [varga, artha]); paryaya = cur.fetchall(); return render_template('quiz.html', rows=rows, paryaya=paryaya, varga=varga) finally: con.close()
{ "deleted": [ { "line_no": 12, "char_start": 213, "char_end": 371, "line": " cur.execute(\"select * from pada inner join mula on pada.sloka_line = mula.sloka_line where pada.varga = '%s' order by random() limit 1;\" % varga)\n" }, { "line_no": 16, "char_start": 445, "char_end": 560, "line": " cur.execute(\"select pada from pada where varga = '%s' and artha = '%s' order by id\" % (varga, artha));\n" } ], "added": [ { "line_no": 12, "char_start": 213, "char_end": 369, "line": " cur.execute(\"select * from pada inner join mula on pada.sloka_line = mula.sloka_line where pada.varga = ? order by random() limit 1;\", [varga])\n" }, { "line_no": 16, "char_start": 443, "char_end": 551, "line": " cur.execute(\"select pada from pada where varga = ? and artha = ? order by id\", [varga, artha]);\n" } ] }
{ "deleted": [ { "char_start": 329, "char_end": 333, "chars": "'%s'" }, { "char_start": 361, "char_end": 363, "chars": " %" }, { "char_start": 506, "char_end": 510, "chars": "'%s'" }, { "char_start": 523, "char_end": 527, "chars": "'%s'" }, { "char_start": 540, "char_end": 542, "chars": " %" }, { "char_start": 543, "char_end": 544, "chars": "(" }, { "char_start": 556, "char_end": 557, "chars": ")" } ], "added": [ { "char_start": 329, "char_end": 330, "chars": "?" }, { "char_start": 358, "char_end": 359, "chars": "," }, { "char_start": 360, "char_end": 361, "chars": "[" }, { "char_start": 366, "char_end": 367, "chars": "]" }, { "char_start": 504, "char_end": 505, "chars": "?" }, { "char_start": 518, "char_end": 519, "chars": "?" }, { "char_start": 532, "char_end": 533, "chars": "," }, { "char_start": 534, "char_end": 535, "chars": "[" }, { "char_start": 547, "char_end": 548, "chars": "]" } ] }
github.com/aupasana/amara-quiz/commit/6ceb5dc8ec38b4a3f1399e578ab970f7e3354922
docker/app.py
cwe-089
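The fix in this row swaps Python `%` interpolation for sqlite3's `?` placeholders and hands the value to `execute` separately, so the driver binds it as data. A minimal standalone sketch of that pattern, reusing the table and column names from the record above (the database path is a placeholder):

```python
import sqlite3

def fetch_random_pada(db_path, varga):
    """Return one random row for the given varga using a bound parameter."""
    with sqlite3.connect(db_path) as con:
        con.row_factory = sqlite3.Row
        cur = con.cursor()
        # `varga` is bound by the driver; it can never terminate the SQL string.
        cur.execute(
            "SELECT * FROM pada WHERE varga = ? ORDER BY random() LIMIT 1",
            (varga,),
        )
        return cur.fetchone()
```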
reportMatch._checkPairing
def _checkPairing(): if winner == loser: raise ValueError('Attempt to match player against self') q = ''' SELECT COUNT(*) FROM matches WHERE (matches.winner_id = %s AND matches.loser_id = %s) OR (matches.winner_id = %s AND matches.loser_id = %s); ''' % (winner, loser, loser, winner) cur.execute(q) if cur.fetchone()[0] > 0: raise ValueError('Pairing %s, %s already played' % (winner, loser))
def _checkPairing(): if winner == loser: raise ValueError('Attempt to match player against self') q = ''' SELECT COUNT(*) FROM matches WHERE (matches.winner_id = %s AND matches.loser_id = %s) OR (matches.winner_id = %s AND matches.loser_id = %s); ''' cur.execute(q, (winner, loser, loser, winner)) if cur.fetchone()[0] > 0: raise ValueError('Pairing %s, %s already played' % (winner, loser))
{ "deleted": [ { "line_no": 9, "char_start": 310, "char_end": 355, "line": " ''' % (winner, loser, loser, winner)\n" }, { "line_no": 10, "char_start": 355, "char_end": 378, "line": " cur.execute(q)\n" } ], "added": [ { "line_no": 9, "char_start": 310, "char_end": 322, "line": " '''\n" }, { "line_no": 10, "char_start": 322, "char_end": 377, "line": " cur.execute(q, (winner, loser, loser, winner))\n" } ] }
{ "deleted": [ { "char_start": 322, "char_end": 323, "chars": "%" }, { "char_start": 354, "char_end": 376, "chars": "\n cur.execute(q" } ], "added": [ { "char_start": 321, "char_end": 327, "chars": "\n " }, { "char_start": 328, "char_end": 344, "chars": " cur.execute(q," } ] }
github.com/juanchopanza/Tournament/commit/5799aee52d2cabb685800b88977257bd0964d0da
vagrant/tournament/tournament.py
cwe-089
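The same pattern with a psycopg2-style cursor: the query keeps literal `%s` markers and the values travel as a separate tuple, so no `%` string formatting is ever applied to the SQL. A hedged sketch (connection and cursor setup assumed):

```python
def count_previous_matches(cur, winner, loser):
    """Count matches already played between two players, in either order."""
    q = """
        SELECT COUNT(*) FROM matches
        WHERE (matches.winner_id = %s AND matches.loser_id = %s)
           OR (matches.winner_id = %s AND matches.loser_id = %s);
    """
    # The driver escapes each value; the query string itself is never formatted.
    cur.execute(q, (winner, loser, loser, winner))
    return cur.fetchone()[0]
```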
add_month_data_row
def add_month_data_row(self, inverter_serial, ts, etoday, etotal): y = datetime.fromtimestamp(ts) - timedelta(days=1) y_ts = int(datetime(y.year, y.month, y.day, 23, tzinfo=pytz.utc).timestamp()) query = ''' INSERT INTO MonthData ( TimeStamp, Serial, DayYield, TotalYield ) VALUES ( %s, %s, %s, %s ); ''' % (y_ts, inverter_serial, etoday, etotal) self.c.execute(query)
def add_month_data_row(self, inverter_serial, ts, etoday, etotal): y = datetime.fromtimestamp(ts) - timedelta(days=1) y_ts = int(datetime(y.year, y.month, y.day, 23, tzinfo=pytz.utc).timestamp()) query = ''' INSERT INTO MonthData ( TimeStamp, Serial, DayYield, TotalYield ) VALUES ( ?, ?, ?, ? ); ''' self.c.execute(query, (y_ts, inverter_serial, etoday, etotal))
{ "deleted": [ { "line_no": 13, "char_start": 434, "char_end": 454, "line": " %s,\n" }, { "line_no": 14, "char_start": 454, "char_end": 474, "line": " %s,\n" }, { "line_no": 15, "char_start": 474, "char_end": 494, "line": " %s,\n" }, { "line_no": 16, "char_start": 494, "char_end": 513, "line": " %s\n" }, { "line_no": 18, "char_start": 528, "char_end": 582, "line": " ''' % (y_ts, inverter_serial, etoday, etotal)\n" }, { "line_no": 19, "char_start": 582, "char_end": 611, "line": " self.c.execute(query)\n" } ], "added": [ { "line_no": 13, "char_start": 434, "char_end": 453, "line": " ?,\n" }, { "line_no": 14, "char_start": 453, "char_end": 472, "line": " ?,\n" }, { "line_no": 15, "char_start": 472, "char_end": 491, "line": " ?,\n" }, { "line_no": 16, "char_start": 491, "char_end": 509, "line": " ?\n" }, { "line_no": 18, "char_start": 524, "char_end": 536, "line": " '''\n" }, { "line_no": 19, "char_start": 536, "char_end": 606, "line": " self.c.execute(query, (y_ts, inverter_serial, etoday, etotal))\n" } ] }
{ "deleted": [ { "char_start": 450, "char_end": 452, "chars": "%s" }, { "char_start": 470, "char_end": 472, "chars": "%s" }, { "char_start": 490, "char_end": 492, "chars": "%s" }, { "char_start": 510, "char_end": 512, "chars": "%s" }, { "char_start": 540, "char_end": 541, "chars": "%" }, { "char_start": 581, "char_end": 610, "chars": "\n self.c.execute(query" } ], "added": [ { "char_start": 450, "char_end": 451, "chars": "?" }, { "char_start": 469, "char_end": 470, "chars": "?" }, { "char_start": 488, "char_end": 489, "chars": "?" }, { "char_start": 507, "char_end": 508, "chars": "?" }, { "char_start": 535, "char_end": 541, "chars": "\n " }, { "char_start": 542, "char_end": 565, "chars": " self.c.execute(query," } ] }
github.com/philipptrenz/s0-bridge/commit/269b48caa05377b7c58c3e6d1622a4429cb5ba65
util/database.py
cwe-089
create_playlist
def create_playlist(name, db): db.execute( "INSERT INTO playlist (name, video_position) VALUES('{name}', 0);".format(name=name))
def create_playlist(name, db): db.execute( "INSERT INTO playlist (name, video_position) VALUES(%s, 0);", (name,))
{ "deleted": [ { "line_no": 3, "char_start": 47, "char_end": 140, "line": " \"INSERT INTO playlist (name, video_position) VALUES('{name}', 0);\".format(name=name))\n" } ], "added": [ { "line_no": 3, "char_start": 47, "char_end": 125, "line": " \"INSERT INTO playlist (name, video_position) VALUES(%s, 0);\", (name,))" } ] }
{ "deleted": [ { "char_start": 107, "char_end": 115, "chars": "'{name}'" }, { "char_start": 121, "char_end": 128, "chars": ".format" }, { "char_start": 133, "char_end": 138, "chars": "=name" } ], "added": [ { "char_start": 107, "char_end": 109, "chars": "%s" }, { "char_start": 115, "char_end": 117, "chars": ", " }, { "char_start": 122, "char_end": 123, "chars": "," } ] }
github.com/Madmous/playlist/commit/666e52c5f0b8c1f4296e84471637033d9542a7a6
playlist/playlist_repository.py
cwe-089
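One detail this row makes explicit: even with a single bound value, the parameters argument must be a sequence, which is why the fixed call ends in `(name,)` with a trailing comma. A short sketch of the same call (the `db` cursor is assumed, as in the record):

```python
def create_playlist(name, db):
    # The trailing comma makes `(name,)` a one-element tuple. Passing the bare
    # string instead can be treated as a sequence of characters by the driver
    # and rejected for supplying the wrong number of parameters.
    db.execute(
        "INSERT INTO playlist (name, video_position) VALUES (%s, 0);",
        (name,),
    )
```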
karma_ask
def karma_ask(name): db = db_connect() cursor = db.cursor() try: cursor.execute( ''' SELECT karma FROM people WHERE name='{}' '''.format(name)) karma = cursor.fetchone() if karma is None: logger.debug('No karma found for name {}'.format(name)) db.close() return karma else: karma = karma[0] logger.debug('karma of {} found for name {}'.format(karma, name)) db.close() return karma except Exception as e: logger.error('Execution failed with error: {}'.format(e)) raise
def karma_ask(name): db = db_connect() cursor = db.cursor() try: cursor.execute(''' SELECT karma FROM people WHERE name=%(name)s ''', (name, )) karma = cursor.fetchone() if karma is None: logger.debug('No karma found for name {}'.format(name)) db.close() return karma else: karma = karma[0] logger.debug('karma of {} found for name {}'.format(karma, name)) db.close() return karma except Exception as e: logger.error('Execution failed with error: {}'.format(e)) raise
{ "deleted": [ { "line_no": 5, "char_start": 77, "char_end": 101, "line": " cursor.execute(\n" }, { "line_no": 6, "char_start": 101, "char_end": 176, "line": " ''' SELECT karma FROM people WHERE name='{}' '''.format(name))\n" } ], "added": [ { "line_no": 5, "char_start": 77, "char_end": 154, "line": " cursor.execute(''' SELECT karma FROM people WHERE name=%(name)s ''',\n" }, { "line_no": 6, "char_start": 154, "char_end": 187, "line": " (name, ))\n" } ] }
{ "deleted": [ { "char_start": 100, "char_end": 113, "chars": "\n " }, { "char_start": 153, "char_end": 157, "chars": "'{}'" }, { "char_start": 161, "char_end": 168, "chars": ".format" } ], "added": [ { "char_start": 140, "char_end": 148, "chars": "%(name)s" }, { "char_start": 152, "char_end": 177, "chars": ",\n " }, { "char_start": 182, "char_end": 184, "chars": ", " } ] }
github.com/tylarb/KarmaBoi-PCF/commit/c1d00a27d7f6b7eb6f15a3dacd4269654a32c10a
KarmaBoi/dbopts.py
cwe-089
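This row switches to a named placeholder, `%(name)s`. With psycopg2 the named style is normally paired with a mapping of parameter names to values, which keeps longer queries readable; a minimal sketch (cursor setup assumed, dict form used for illustration):

```python
def karma_for(cursor, name):
    cursor.execute(
        "SELECT karma FROM people WHERE name = %(name)s",
        {"name": name},  # named placeholders are resolved from a mapping
    )
    row = cursor.fetchone()
    return row[0] if row else None
```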
post
def post(self): """ Returns JWT upon login verification """ json_data = request.get_json() if not json_data['email']: return jsonify({"msg": "Missing email"}), 400 data = database_utilities.execute_query( f"""select * from admins where email = '{json_data['email']}'""") if data: email = data[0]['email'] access_token = create_access_token(identity=email) refresh_token = create_refresh_token(identity=email) resp = jsonify({"login": True}) set_access_cookies(resp, access_token) set_refresh_cookies(resp, refresh_token) return resp else: return jsonify({"msg": "User is not an admin"})
def post(self): """ Returns JWT upon login verification """ json_data = request.get_json() if not json_data['email']: return jsonify({"msg": "Missing email"}), 400 data = database_utilities.execute_query( f"""select * from admins where email = %s""", (json_data['email'], )) if data: email = data[0]['email'] access_token = create_access_token(identity=email) refresh_token = create_refresh_token(identity=email) resp = jsonify({"login": True}) set_access_cookies(resp, access_token) set_refresh_cookies(resp, refresh_token) return resp else: return jsonify({"msg": "User is not an admin"})
{ "deleted": [ { "line_no": 8, "char_start": 254, "char_end": 332, "line": " f\"\"\"select * from admins where email = '{json_data['email']}'\"\"\")\n" } ], "added": [ { "line_no": 8, "char_start": 254, "char_end": 336, "line": " f\"\"\"select * from admins where email = %s\"\"\", (json_data['email'], ))\n" } ] }
{ "deleted": [ { "char_start": 305, "char_end": 307, "chars": "'{" }, { "char_start": 325, "char_end": 330, "chars": "}'\"\"\"" } ], "added": [ { "char_start": 305, "char_end": 313, "chars": "%s\"\"\", (" }, { "char_start": 331, "char_end": 334, "chars": ", )" } ] }
github.com/sgosal2/tiger-boards-backend/commit/4670109dd613df2f2fe7e8403ebd149df2b55485
apis/login.py
cwe-089
add_day_data_row
def add_day_data_row(self, ts, data, prev_etotal): if data['power'] > 0: inv_serial = data['source']['serial_id'] query = ''' INSERT INTO DayData ( TimeStamp, Serial, Power, TotalYield ) VALUES ( %s, %s, %s, %s ); ''' % (ts, inv_serial, data['power'], prev_etotal + data['energy']) self.c.execute(query)
def add_day_data_row(self, ts, data, prev_etotal): if data['power'] > 0: inv_serial = data['source']['serial_id'] query = ''' INSERT INTO DayData ( TimeStamp, Serial, Power, TotalYield ) VALUES ( ?, ?, ?, ? ); ''' self.c.execute(query, (ts, inv_serial, data['power'], prev_etotal + data['energy']))
{ "deleted": [ { "line_no": 13, "char_start": 340, "char_end": 363, "line": " %s,\n" }, { "line_no": 14, "char_start": 363, "char_end": 386, "line": " %s,\n" }, { "line_no": 15, "char_start": 386, "char_end": 409, "line": " %s,\n" }, { "line_no": 16, "char_start": 409, "char_end": 431, "line": " %s\n" }, { "line_no": 18, "char_start": 449, "char_end": 530, "line": " ''' % (ts, inv_serial, data['power'], prev_etotal + data['energy'])\n" }, { "line_no": 19, "char_start": 530, "char_end": 563, "line": " self.c.execute(query)\n" } ], "added": [ { "line_no": 13, "char_start": 340, "char_end": 362, "line": " ?,\n" }, { "line_no": 14, "char_start": 362, "char_end": 384, "line": " ?,\n" }, { "line_no": 15, "char_start": 384, "char_end": 406, "line": " ?,\n" }, { "line_no": 16, "char_start": 406, "char_end": 427, "line": " ?\n" }, { "line_no": 18, "char_start": 445, "char_end": 461, "line": " '''\n" }, { "line_no": 19, "char_start": 461, "char_end": 558, "line": " self.c.execute(query, (ts, inv_serial, data['power'], prev_etotal + data['energy']))\n" } ] }
{ "deleted": [ { "char_start": 359, "char_end": 361, "chars": "%s" }, { "char_start": 382, "char_end": 384, "chars": "%s" }, { "char_start": 405, "char_end": 407, "chars": "%s" }, { "char_start": 428, "char_end": 430, "chars": "%s" }, { "char_start": 465, "char_end": 466, "chars": "%" }, { "char_start": 529, "char_end": 562, "chars": "\n self.c.execute(query" } ], "added": [ { "char_start": 359, "char_end": 360, "chars": "?" }, { "char_start": 381, "char_end": 382, "chars": "?" }, { "char_start": 403, "char_end": 404, "chars": "?" }, { "char_start": 425, "char_end": 426, "chars": "?" }, { "char_start": 460, "char_end": 468, "chars": "\n " }, { "char_start": 469, "char_end": 494, "chars": " self.c.execute(query," } ] }
github.com/philipptrenz/s0-bridge/commit/269b48caa05377b7c58c3e6d1622a4429cb5ba65
util/database.py
cwe-089
store_metadata
def store_metadata(self, session, key, mType, value): if (self.idNormalizer is not None): id = self.idNormalizer.process_string(session, id) elif type(id) == unicode: id = id.encode('utf-8') else: id = str(id) self._openContainer(session) query = ("UPDATE %s SET %s = %r WHERE identifier = '%s';" % (self.table, mType, value, id) ) try: self._query(query) except: return None return value
def store_metadata(self, session, key, mType, value): if (self.idNormalizer is not None): id = self.idNormalizer.process_string(session, id) elif type(id) == unicode: id = id.encode('utf-8') else: id = str(id) self._openContainer(session) query = ("UPDATE %s SET %s = $1 WHERE identifier = $2;" % (self.table, mType) ) args = (value, id) try: self._query(query, *args) except: return None return value
{ "deleted": [ { "line_no": 9, "char_start": 311, "char_end": 379, "line": " query = (\"UPDATE %s SET %s = %r WHERE identifier = '%s';\" %\n" }, { "line_no": 10, "char_start": 379, "char_end": 427, "line": " (self.table, mType, value, id)\n" }, { "line_no": 13, "char_start": 459, "char_end": 490, "line": " self._query(query)\n" } ], "added": [ { "line_no": 9, "char_start": 311, "char_end": 377, "line": " query = (\"UPDATE %s SET %s = $1 WHERE identifier = $2;\" %\n" }, { "line_no": 10, "char_start": 377, "char_end": 414, "line": " (self.table, mType)\n" }, { "line_no": 12, "char_start": 433, "char_end": 460, "line": " args = (value, id)\n" }, { "line_no": 14, "char_start": 473, "char_end": 511, "line": " self._query(query, *args)\n" } ] }
{ "deleted": [ { "char_start": 348, "char_end": 350, "chars": "%r" }, { "char_start": 370, "char_end": 374, "chars": "'%s'" }, { "char_start": 414, "char_end": 415, "chars": "," }, { "char_start": 416, "char_end": 422, "chars": "value," }, { "char_start": 423, "char_end": 425, "chars": "id" }, { "char_start": 435, "char_end": 441, "chars": " " } ], "added": [ { "char_start": 348, "char_end": 350, "chars": "$1" }, { "char_start": 370, "char_end": 372, "chars": "$2" }, { "char_start": 441, "char_end": 468, "chars": "args = (value, id)\n " }, { "char_start": 502, "char_end": 509, "chars": ", *args" } ] }
github.com/cheshire3/cheshire3/commit/d350363b4ea10f102c24c8f26d7b76b006323e8e
cheshire3/sql/postgresStore.py
cwe-089
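The placeholders in this fix are PostgreSQL-style numbered markers (`$1`, `$2`), forwarded through the store's project-specific `_query` wrapper. As a self-contained illustration of the same `$N` style, here is a sketch using asyncpg, which accepts positional arguments directly; asyncpg and the DSN are assumptions for the example, not part of the commit:

```python
import asyncpg

async def update_metadata(dsn, table, column, value, identifier):
    conn = await asyncpg.connect(dsn)
    try:
        # Identifiers (table, column) cannot be bound parameters, so they must
        # come from trusted configuration; the values ride along as $1 and $2.
        await conn.execute(
            f"UPDATE {table} SET {column} = $1 WHERE identifier = $2;",
            value, identifier,
        )
    finally:
        await conn.close()
```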
get
def get(self, space_id): """ Fetch data for space with the corresponding space_id """ return database_utilities.execute_query( f"""select * from spaces where space_id = '{space_id}'""")
def get(self, space_id): """ Fetch data for space with the corresponding space_id """ return database_utilities.execute_query( f"""select * from spaces where space_id = %s""", (space_id, ))
{ "deleted": [ { "line_no": 4, "char_start": 147, "char_end": 217, "line": " f\"\"\"select * from spaces where space_id = '{space_id}'\"\"\")\n" } ], "added": [ { "line_no": 4, "char_start": 147, "char_end": 221, "line": " f\"\"\"select * from spaces where space_id = %s\"\"\", (space_id, ))\n" } ] }
{ "deleted": [ { "char_start": 201, "char_end": 203, "chars": "'{" }, { "char_start": 211, "char_end": 216, "chars": "}'\"\"\"" } ], "added": [ { "char_start": 201, "char_end": 209, "chars": "%s\"\"\", (" }, { "char_start": 217, "char_end": 220, "chars": ", )" } ] }
github.com/sgosal2/tiger-boards-backend/commit/4670109dd613df2f2fe7e8403ebd149df2b55485
apis/spaces.py
cwe-089
check
def check(current_num): try: cursor.execute('SELECT * FROM comics WHERE num="%s"' % current_num) except sqlite3.OperationalError: cursor.execute('CREATE TABLE comics (num text)') return False else: return False if cursor.fetchone() is None else True
def check(current_num): try: cursor.execute('SELECT * FROM comics WHERE num=?', (current_num,)) except sqlite3.OperationalError: cursor.execute('CREATE TABLE comics (num text)') return False else: return False if cursor.fetchone() is None else True
{ "deleted": [ { "line_no": 3, "char_start": 33, "char_end": 109, "line": " cursor.execute('SELECT * FROM comics WHERE num=\"%s\"' % current_num)\n" } ], "added": [ { "line_no": 3, "char_start": 33, "char_end": 108, "line": " cursor.execute('SELECT * FROM comics WHERE num=?', (current_num,))\n" } ] }
{ "deleted": [ { "char_start": 88, "char_end": 92, "chars": "\"%s\"" }, { "char_start": 93, "char_end": 95, "chars": " %" } ], "added": [ { "char_start": 88, "char_end": 89, "chars": "?" }, { "char_start": 90, "char_end": 91, "chars": "," }, { "char_start": 92, "char_end": 93, "chars": "(" }, { "char_start": 104, "char_end": 106, "chars": ",)" } ] }
github.com/lord63/a_bunch_of_code/commit/c0d67a1312306fd1257c354bfb5d6cac7643aa29
comics/check_comics.py
cwe-089
delete_crawl
@app.route('/delete_crawl', methods=['POST']) @is_logged_in def delete_crawl(): # Get Form Fields cid = request.form['cid'] # Create cursor cur = mysql.connection.cursor() # Get user by username result = cur.execute("DELETE FROM Crawls WHERE cid = %s" % cid) # Commit to DB mysql.connection.commit() # Close connection cur.close() # FIXME check if successfull first, return message flash('Crawl successfully removed', 'success') return redirect(url_for('dashboard'))
@app.route('/delete_crawl', methods=['POST']) @is_logged_in def delete_crawl(): # Get Form Fields cid = request.form['cid'] # Create cursor cur = mysql.connection.cursor() # Get user by username result = cur.execute("""DELETE FROM Crawls WHERE cid = %s""" (cid,)) # Commit to DB mysql.connection.commit() # Close connection cur.close() # FIXME check if successfull first, return message flash('Crawl successfully removed', 'success') return redirect(url_for('dashboard'))
{ "deleted": [ { "line_no": 12, "char_start": 238, "char_end": 310, "line": " result = cur.execute(\"DELETE FROM Crawls WHERE cid = %s\" % cid)\n" } ], "added": [ { "line_no": 12, "char_start": 238, "char_end": 315, "line": " result = cur.execute(\"\"\"DELETE FROM Crawls WHERE cid = %s\"\"\" (cid,))\n" } ] }
{ "deleted": [ { "char_start": 303, "char_end": 305, "chars": "% " } ], "added": [ { "char_start": 268, "char_end": 270, "chars": "\"\"" }, { "char_start": 304, "char_end": 306, "chars": "\"\"" }, { "char_start": 307, "char_end": 308, "chars": "(" }, { "char_start": 311, "char_end": 313, "chars": ",)" } ] }
github.com/yannvon/table-detection/commit/4bad3673debf0b9491b520f0e22e9186af78c375
bar.py
cwe-089
add_movie
@app.route('/movies/add', methods=['GET', 'POST']) def add_movie(): form = MovieForm() if not form.validate_on_submit(): return render_template('new_movie.html', title='Add New Movie', form=form) lang_id = add_language(form.data['language']) movie = { 'title': '', 'description': '', 'release_year': 0, 'rental_duration': 0, 'rental_rate': 0.00, 'length': 0, 'replacement_cost': 0.00 } for k, v in movie.items(): movie[k] = form.data[k] movie['language_id'] = movie.get('language_id', lang_id) cur.execute( """ INSERT INTO film (title, description, release_year, language_id, rental_duration, rental_rate, length, replacement_cost) VALUES ('{}', '{}', {}, {}, {}, {}, {}, {}) """.format(*[v for k, v in movie.items()]) ) try: cur.execute(f"SELECT * FROM film where fulltext @@ to_tsquery('Dark Knight')") res = cur.fetchall() conn.commit() return redirect(url_for('movies')) except Exception as e: return redirect(url_for('index'))
@app.route('/movies/add', methods=['GET', 'POST']) def add_movie(): form = MovieForm() if not form.validate_on_submit(): return render_template('new_movie.html', title='Add New Movie', form=form) lang_id = add_language(form.data['language']) movie = { 'title': '', 'description': '', 'release_year': 0, 'rental_duration': 0, 'rental_rate': 0.00, 'length': 0, 'replacement_cost': 0.00 } for k, v in movie.items(): movie[k] = form.data[k] movie['language_id'] = movie.get('language_id', lang_id) cur.execute( """ INSERT INTO film (title, description, release_year, language_id, rental_duration, rental_rate, length, replacement_cost) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) """, [(v, ) for k, v in movie.items()] ) try: cur.execute("SELECT * FROM film where fulltext @@ to_tsquery(%s)", (movie['title'], )) res = cur.fetchall() conn.commit() return redirect(url_for('movies')) except Exception as e: return redirect(url_for('index'))
{ "deleted": [ { "line_no": 22, "char_start": 784, "char_end": 836, "line": " VALUES ('{}', '{}', {}, {}, {}, {}, {}, {})\n" }, { "line_no": 23, "char_start": 836, "char_end": 887, "line": " \"\"\".format(*[v for k, v in movie.items()])\n" }, { "line_no": 26, "char_start": 902, "char_end": 989, "line": " cur.execute(f\"SELECT * FROM film where fulltext @@ to_tsquery('Dark Knight')\")\n" } ], "added": [ { "line_no": 22, "char_start": 784, "char_end": 832, "line": " VALUES (%s, %s, %s, %s, %s, %s, %s, %s)\n" }, { "line_no": 23, "char_start": 832, "char_end": 879, "line": " \"\"\", [(v, ) for k, v in movie.items()]\n" }, { "line_no": 26, "char_start": 894, "char_end": 989, "line": " cur.execute(\"SELECT * FROM film where fulltext @@ to_tsquery(%s)\", (movie['title'], ))\n" } ] }
{ "deleted": [ { "char_start": 800, "char_end": 804, "chars": "'{}'" }, { "char_start": 806, "char_end": 810, "chars": "'{}'" }, { "char_start": 812, "char_end": 814, "chars": "{}" }, { "char_start": 816, "char_end": 818, "chars": "{}" }, { "char_start": 820, "char_end": 822, "chars": "{}" }, { "char_start": 824, "char_end": 826, "chars": "{}" }, { "char_start": 828, "char_end": 830, "chars": "{}" }, { "char_start": 832, "char_end": 834, "chars": "{}" }, { "char_start": 847, "char_end": 856, "chars": ".format(*" }, { "char_start": 885, "char_end": 886, "chars": ")" }, { "char_start": 922, "char_end": 923, "chars": "f" }, { "char_start": 973, "char_end": 980, "chars": "Dark Kn" }, { "char_start": 981, "char_end": 983, "chars": "gh" }, { "char_start": 986, "char_end": 987, "chars": "\"" } ], "added": [ { "char_start": 800, "char_end": 802, "chars": "%s" }, { "char_start": 804, "char_end": 806, "chars": "%s" }, { "char_start": 808, "char_end": 810, "chars": "%s" }, { "char_start": 812, "char_end": 814, "chars": "%s" }, { "char_start": 816, "char_end": 818, "chars": "%s" }, { "char_start": 820, "char_end": 822, "chars": "%s" }, { "char_start": 824, "char_end": 826, "chars": "%s" }, { "char_start": 828, "char_end": 830, "chars": "%s" }, { "char_start": 843, "char_end": 846, "chars": ", [" }, { "char_start": 848, "char_end": 851, "chars": ", )" }, { "char_start": 963, "char_end": 968, "chars": "%s)\"," }, { "char_start": 969, "char_end": 978, "chars": "(movie['t" }, { "char_start": 980, "char_end": 982, "chars": "le" }, { "char_start": 983, "char_end": 986, "chars": "], " } ] }
github.com/Elbertbiggs360/dvdrental/commit/ad144ae2a08a332498d0831bc255170d57ba754b
app.py
cwe-089
new_category
def new_category(category_name): try: conn = check_heroku_db() cur = conn.cursor() cur.execute('''INSERT INTO categories (cat_name) VALUES (%s)''', (category_name,)) conn.commit() conn.close() except psycopg2.DatabaseError as e: print('Error %s' % e) sys.exit(1)
def new_category(category_name): try: conn = check_heroku_db() cur = conn.cursor() query = "INSERT INTO categories (cat_name) VALUES (%s);" data = (category_name,) cur.execute(query, data) conn.commit() conn.close() except psycopg2.DatabaseError as e: print('Error %s' % e) sys.exit(1)
{ "deleted": [ { "line_no": 5, "char_start": 103, "char_end": 194, "line": " cur.execute('''INSERT INTO categories (cat_name) VALUES (%s)''', (category_name,))\n" } ], "added": [ { "line_no": 5, "char_start": 103, "char_end": 104, "line": "\n" }, { "line_no": 6, "char_start": 104, "char_end": 169, "line": " query = \"INSERT INTO categories (cat_name) VALUES (%s);\"\n" }, { "line_no": 7, "char_start": 169, "char_end": 201, "line": " data = (category_name,)\n" }, { "line_no": 8, "char_start": 201, "char_end": 234, "line": " cur.execute(query, data)\n" }, { "line_no": 9, "char_start": 234, "char_end": 235, "line": "\n" } ] }
{ "deleted": [ { "char_start": 111, "char_end": 112, "chars": "c" }, { "char_start": 114, "char_end": 126, "chars": ".execute('''" }, { "char_start": 171, "char_end": 175, "chars": "'''," } ], "added": [ { "char_start": 103, "char_end": 104, "chars": "\n" }, { "char_start": 112, "char_end": 113, "chars": "q" }, { "char_start": 115, "char_end": 121, "chars": "ry = \"" }, { "char_start": 166, "char_end": 183, "chars": ";\"\n data =" }, { "char_start": 200, "char_end": 232, "chars": "\n cur.execute(query, data" }, { "char_start": 233, "char_end": 234, "chars": "\n" } ] }
github.com/leeorb321/expenses/commit/f93c0fa4d30787ef16420bfefc52565b98bc7fcf
db.py
cwe-089
get_first_month
def get_first_month(db, scene): sql = "select date from matches where scene='{}' order by date limit 1;".format(scene) res = db.exec(sql) date = res[0][0] return date
def get_first_month(db, scene): sql = "select date from matches where scene='{scene}' order by date limit 1;" args = {'scene': scene} res = db.exec(sql, args) date = res[0][0] return date
{ "deleted": [ { "line_no": 2, "char_start": 32, "char_end": 123, "line": " sql = \"select date from matches where scene='{}' order by date limit 1;\".format(scene)\n" }, { "line_no": 3, "char_start": 123, "char_end": 146, "line": " res = db.exec(sql)\n" } ], "added": [ { "line_no": 2, "char_start": 32, "char_end": 114, "line": " sql = \"select date from matches where scene='{scene}' order by date limit 1;\"\n" }, { "line_no": 3, "char_start": 114, "char_end": 142, "line": " args = {'scene': scene}\n" }, { "line_no": 4, "char_start": 142, "char_end": 171, "line": " res = db.exec(sql, args)\n" } ] }
{ "deleted": [ { "char_start": 108, "char_end": 111, "chars": ".fo" }, { "char_start": 112, "char_end": 116, "chars": "mat(" }, { "char_start": 121, "char_end": 122, "chars": ")" } ], "added": [ { "char_start": 82, "char_end": 87, "chars": "scene" }, { "char_start": 113, "char_end": 119, "chars": "\n a" }, { "char_start": 120, "char_end": 127, "chars": "gs = {'" }, { "char_start": 132, "char_end": 141, "chars": "': scene}" }, { "char_start": 163, "char_end": 169, "chars": ", args" } ] }
github.com/DKelle/Smash_stats/commit/4bb83f3f6ce7d6bebbeb512cd015f9e72cf36d63
bracket_utils.py
cwe-089
getOptions
def getOptions(poll_name): conn, c = connectDB() options_str = queryOne(c, "SELECT options FROM {} WHERE name='{}'".format(CFG("poll_table_name"), poll_name)) if options_str == None: return None options = options_str.split(",") closeDB(conn) return options
def getOptions(poll_name): conn, c = connectDB() req = "SELECT options FROM {} WHERE name=?".format(CFG("poll_table_name")) options_str = queryOne(c, req, (poll_name,)) if options_str == None: return None options = options_str.split(",") closeDB(conn) return options
{ "deleted": [ { "line_no": 3, "char_start": 53, "char_end": 167, "line": " options_str = queryOne(c, \"SELECT options FROM {} WHERE name='{}'\".format(CFG(\"poll_table_name\"), poll_name))\n" } ], "added": [ { "line_no": 3, "char_start": 53, "char_end": 132, "line": " req = \"SELECT options FROM {} WHERE name=?\".format(CFG(\"poll_table_name\"))\n" }, { "line_no": 4, "char_start": 132, "char_end": 181, "line": " options_str = queryOne(c, req, (poll_name,))\n" } ] }
{ "deleted": [ { "char_start": 57, "char_end": 67, "chars": "options_st" }, { "char_start": 71, "char_end": 83, "chars": "queryOne(c, " }, { "char_start": 118, "char_end": 122, "chars": "'{}'" } ], "added": [ { "char_start": 58, "char_end": 60, "chars": "eq" }, { "char_start": 98, "char_end": 99, "chars": "?" }, { "char_start": 130, "char_end": 160, "chars": ")\n options_str = queryOne(c" }, { "char_start": 162, "char_end": 168, "chars": "req, (" }, { "char_start": 177, "char_end": 178, "chars": "," } ] }
github.com/FAUSheppy/simple-python-poll/commit/186c5ff5cdf58272e253a1bb432419ee50d93109
database.py
cwe-089
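This row keeps `.format()` for the table name and reserves the `?` placeholder for the value, which reflects a general limitation: placeholders bind values only, never identifiers such as table or column names. When the identifier is not a hard-coded constant, one common defence is to check it against an allow-list before formatting it in; a hedged sqlite3 sketch (the allow-list and function name are illustrative):

```python
import sqlite3

ALLOWED_TABLES = {"polls", "archived_polls"}  # illustrative allow-list

def get_options(con, table_name, poll_name):
    if table_name not in ALLOWED_TABLES:
        raise ValueError(f"unexpected table name: {table_name!r}")
    cur = con.cursor()
    # The identifier comes from the allow-list; the value is bound with `?`.
    cur.execute(
        f"SELECT options FROM {table_name} WHERE name = ?",
        (poll_name,),
    )
    return cur.fetchone()
```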
retrieve_last_video_position
def retrieve_last_video_position(playlist_id, db): db.execute("SELECT max(position) as position from video WHERE playlist_id={playlist_id};".format( playlist_id=playlist_id)) row = db.fetchone() return row['position']
def retrieve_last_video_position(playlist_id, db): db.execute( "SELECT max(position) as position from video WHERE playlist_id=%s;", (playlist_id,)) row = db.fetchone() return row['position']
{ "deleted": [ { "line_no": 2, "char_start": 51, "char_end": 153, "line": " db.execute(\"SELECT max(position) as position from video WHERE playlist_id={playlist_id};\".format(\n" }, { "line_no": 3, "char_start": 153, "char_end": 187, "line": " playlist_id=playlist_id))\n" } ], "added": [ { "line_no": 2, "char_start": 51, "char_end": 67, "line": " db.execute(\n" }, { "line_no": 3, "char_start": 67, "char_end": 160, "line": " \"SELECT max(position) as position from video WHERE playlist_id=%s;\", (playlist_id,))\n" } ] }
{ "deleted": [ { "char_start": 129, "char_end": 136, "chars": "{playli" }, { "char_start": 137, "char_end": 142, "chars": "t_id}" }, { "char_start": 144, "char_end": 158, "chars": ".format(\n " }, { "char_start": 159, "char_end": 173, "chars": " playlist_id=" } ], "added": [ { "char_start": 66, "char_end": 75, "chars": "\n " }, { "char_start": 138, "char_end": 139, "chars": "%" }, { "char_start": 142, "char_end": 144, "chars": ", " }, { "char_start": 156, "char_end": 157, "chars": "," } ] }
github.com/Madmous/playlist/commit/666e52c5f0b8c1f4296e84471637033d9542a7a6
video/video_repository.py
cwe-089
view_page_history
@app.route('/<page_name>/history') def view_page_history(page_name): query = db.query("select page_content.timestamp, page_content.id from page, page_content where page.id = page_content.page_id and page.page_name = '%s'" % page_name) page_histories = query.namedresult() return render_template( 'page_history.html', page_name = page_name, page_histories = page_histories )
@app.route('/<page_name>/history') def view_page_history(page_name): query = db.query("select page_content.timestamp, page_content.id from page, page_content where page.id = page_content.page_id and page.page_name = $1", page_name) page_histories = query.namedresult() return render_template( 'page_history.html', page_name = page_name, page_histories = page_histories )
{ "deleted": [ { "line_no": 3, "char_start": 69, "char_end": 239, "line": " query = db.query(\"select page_content.timestamp, page_content.id from page, page_content where page.id = page_content.page_id and page.page_name = '%s'\" % page_name)\n" } ], "added": [ { "line_no": 3, "char_start": 69, "char_end": 236, "line": " query = db.query(\"select page_content.timestamp, page_content.id from page, page_content where page.id = page_content.page_id and page.page_name = $1\", page_name)\n" } ] }
{ "deleted": [ { "char_start": 220, "char_end": 224, "chars": "'%s'" }, { "char_start": 225, "char_end": 227, "chars": " %" } ], "added": [ { "char_start": 220, "char_end": 222, "chars": "$1" }, { "char_start": 223, "char_end": 224, "chars": "," } ] }
github.com/Pumala/python_wiki_app_redo/commit/65d60747cd8efb05970304234d3bd949d2088e8b
server.py
cwe-089
add_input
def add_input(self, data): connection = self.connects() try: # The following introduces a deliberate security flaw. See section on SQL injecton below query = "INSERT INTO crimes (description) VALUES ('{}');".format( data) with connection.cursor() as cursor: cursor.execute(query) connection.commit() finally: connection.close()
def add_input(self, data): connection = self.connects() try: # The following introduces a deliberate security flaw. See section on SQL injecton below query = "INSERT INTO crimes (description) VALUES (%s);" with connection.cursor() as cursor: cursor.execute(query, data) connection.commit() finally: connection.close()
{ "deleted": [ { "line_no": 5, "char_start": 182, "char_end": 260, "line": " query = \"INSERT INTO crimes (description) VALUES ('{}');\".format(\n" }, { "line_no": 6, "char_start": 260, "char_end": 282, "line": " data)\n" }, { "line_no": 8, "char_start": 330, "char_end": 368, "line": " cursor.execute(query)\n" } ], "added": [ { "line_no": 5, "char_start": 182, "char_end": 250, "line": " query = \"INSERT INTO crimes (description) VALUES (%s);\"\n" }, { "line_no": 7, "char_start": 298, "char_end": 342, "line": " cursor.execute(query, data)\n" } ] }
{ "deleted": [ { "char_start": 244, "char_end": 248, "chars": "'{}'" }, { "char_start": 251, "char_end": 281, "chars": ".format(\n data)" } ], "added": [ { "char_start": 244, "char_end": 246, "chars": "%s" }, { "char_start": 334, "char_end": 340, "chars": ", data" } ] }
github.com/JeremiahO/crimemap/commit/c17537fcd7aa4e2a26f7ca5cefaeb356ff646858
dbhelper.py
cwe-089
get_top_author
def get_top_author(top_num): """ query the top(top_num) popular author top_num => list of [author, count] """ cmd = """SELECT authors.name,author_result.num FROM authors JOIN (SELECT SUM(article_result.num) as num, article_result.author from (SELECT articles.title, articles.author, SUM(log.views) AS num FROM articles INNER JOIN ( SELECT path, count(path) AS views FROM log GROUP BY log.path ) AS log ON log.path = '/article/' || articles.slug GROUP BY articles.title, articles.author) AS article_result GROUP BY article_result.author) as author_result ON authors.id = author_result.author ORDER BY num DESC LIMIT {}""".format(top_num) return execute_query(cmd)
def get_top_author(top_num): """ query the top(top_num) popular author top_num => list of [author, count] """ cmd = """SELECT authors.name,author_result.num FROM authors JOIN (SELECT SUM(article_result.num) as num, article_result.author from (SELECT articles.title, articles.author, SUM(log.views) AS num FROM articles INNER JOIN ( SELECT path, count(path) AS views FROM log GROUP BY log.path ) AS log ON log.path = '/article/' || articles.slug GROUP BY articles.title, articles.author) AS article_result GROUP BY article_result.author) as author_result ON authors.id = author_result.author ORDER BY num DESC LIMIT %s""" data = [top_num, ] return execute_query(cmd, data)
{ "deleted": [ { "line_no": 21, "char_start": 931, "char_end": 998, "line": " ORDER BY num DESC LIMIT {}\"\"\".format(top_num)\r\n" }, { "line_no": 22, "char_start": 998, "char_end": 1027, "line": " return execute_query(cmd)\r\n" } ], "added": [ { "line_no": 21, "char_start": 931, "char_end": 982, "line": " ORDER BY num DESC LIMIT %s\"\"\"\r\n" }, { "line_no": 22, "char_start": 982, "char_end": 1006, "line": " data = [top_num, ]\r\n" }, { "line_no": 23, "char_start": 1006, "char_end": 1041, "line": " return execute_query(cmd, data)\r\n" } ] }
{ "deleted": [ { "char_start": 975, "char_end": 977, "chars": "{}" }, { "char_start": 980, "char_end": 985, "chars": ".form" }, { "char_start": 987, "char_end": 988, "chars": "(" }, { "char_start": 995, "char_end": 996, "chars": ")" } ], "added": [ { "char_start": 975, "char_end": 977, "chars": "%s" }, { "char_start": 980, "char_end": 987, "chars": "\r\n d" }, { "char_start": 989, "char_end": 994, "chars": "a = [" }, { "char_start": 1001, "char_end": 1004, "chars": ", ]" }, { "char_start": 1034, "char_end": 1040, "chars": ", data" } ] }
github.com/thugasin/udacity-homework-logAnalyzer/commit/506f25f9a1caee7f17034adf7c75e0efbc88082b
logAnalyzerDb.py
cwe-089
get_all_referrers
@app.route('/get_all_referrers') def get_all_referrers(): account_id = request.args.get('account_id') if not isObject(account_id): ws.send('{"id":1, "method":"call", "params":[0,"lookup_account_names",[["' + account_id + '"], 0]]}') result_l = ws.recv() j_l = json.loads(result_l) account_id = j_l["result"][0]["id"] con = psycopg2.connect(**config.POSTGRES) cur = con.cursor() query = "select * from referrers where referrer='"+account_id+"'" cur.execute(query) results = cur.fetchall() return jsonify(results)
@app.route('/get_all_referrers') def get_all_referrers(): account_id = request.args.get('account_id') if not isObject(account_id): ws.send('{"id":1, "method":"call", "params":[0,"lookup_account_names",[["' + account_id + '"], 0]]}') result_l = ws.recv() j_l = json.loads(result_l) account_id = j_l["result"][0]["id"] con = psycopg2.connect(**config.POSTGRES) cur = con.cursor() query = "select * from referrers where referrer=%s" cur.execute(query, (account_id,)) results = cur.fetchall() return jsonify(results)
{ "deleted": [ { "line_no": 15, "char_start": 430, "char_end": 500, "line": " query = \"select * from referrers where referrer='\"+account_id+\"'\"\n" }, { "line_no": 16, "char_start": 500, "char_end": 523, "line": " cur.execute(query)\n" } ], "added": [ { "line_no": 15, "char_start": 430, "char_end": 486, "line": " query = \"select * from referrers where referrer=%s\"\n" }, { "line_no": 16, "char_start": 486, "char_end": 524, "line": " cur.execute(query, (account_id,))\n" } ] }
{ "deleted": [ { "char_start": 482, "char_end": 498, "chars": "'\"+account_id+\"'" } ], "added": [ { "char_start": 482, "char_end": 484, "chars": "%s" }, { "char_start": 507, "char_end": 522, "chars": ", (account_id,)" } ] }
github.com/VinChain/vinchain-python-api-backend/commit/b78088a551fbb712121269c6eb7f43ede120ff60
api.py
cwe-089
add_translationname
def add_translationname(self, trname): """Add new translation by item name for an item.""" if self.connection: for item in self.find_item_name([trname[0], '0']): self.cursor.execute('insert into itemtranslation (itemid, itemlanguageid, translation) values ("%s", "%s", "%s")' % (item[0], trname[1], trname[2])) self.connection.commit()
def add_translationname(self, trname): """Add new translation by item name for an item.""" if self.connection: for item in self.find_item_name([trname[0], '0']): t = (item[0], trname[1], trname[2], ) self.cursor.execute('insert into itemtranslation (itemid, itemlanguageid, translation) values (?, ?, ?)', t) self.connection.commit()
{ "deleted": [ { "line_no": 5, "char_start": 194, "char_end": 359, "line": " self.cursor.execute('insert into itemtranslation (itemid, itemlanguageid, translation) values (\"%s\", \"%s\", \"%s\")' % (item[0], trname[1], trname[2]))\n" } ], "added": [ { "line_no": 5, "char_start": 194, "char_end": 248, "line": " t = (item[0], trname[1], trname[2], )\n" }, { "line_no": 6, "char_start": 248, "char_end": 373, "line": " self.cursor.execute('insert into itemtranslation (itemid, itemlanguageid, translation) values (?, ?, ?)', t)\n" }, { "line_no": 7, "char_start": 373, "char_end": 409, "line": " self.connection.commit()\n" } ] }
{ "deleted": [ { "char_start": 305, "char_end": 309, "chars": "\"%s\"" }, { "char_start": 311, "char_end": 315, "chars": "\"%s\"" }, { "char_start": 317, "char_end": 321, "chars": "\"%s\"" }, { "char_start": 323, "char_end": 334, "chars": " % (item[0]" }, { "char_start": 337, "char_end": 357, "chars": "rname[1], trname[2])" } ], "added": [ { "char_start": 210, "char_end": 264, "chars": "t = (item[0], trname[1], trname[2], )\n " }, { "char_start": 359, "char_end": 360, "chars": "?" }, { "char_start": 362, "char_end": 363, "chars": "?" }, { "char_start": 365, "char_end": 366, "chars": "?" } ] }
github.com/ecosl-developers/ecosl/commit/8af050a513338bf68ff2a243e4a2482d24e9aa3a
ecosldb/ecosldb.py
cwe-089
get_max_task_id_for_project
@staticmethod def get_max_task_id_for_project(project_id: int): """Gets the nights task id currently in use on a project""" sql = """select max(id) from tasks where project_id = {0} GROUP BY project_id""".format(project_id) result = db.engine.execute(sql) if result.rowcount == 0: raise NotFound() for row in result: return row[0]
@staticmethod def get_max_task_id_for_project(project_id: int): """Gets the nights task id currently in use on a project""" sql = """select max(id) from tasks where project_id = :project_id GROUP BY project_id""" result = db.engine.execute(text(sql), project_id=project_id) if result.rowcount == 0: raise NotFound() for row in result: return row[0]
{ "deleted": [ { "line_no": 4, "char_start": 140, "char_end": 248, "line": " sql = \"\"\"select max(id) from tasks where project_id = {0} GROUP BY project_id\"\"\".format(project_id)\n" }, { "line_no": 5, "char_start": 248, "char_end": 288, "line": " result = db.engine.execute(sql)\n" } ], "added": [ { "line_no": 4, "char_start": 140, "char_end": 237, "line": " sql = \"\"\"select max(id) from tasks where project_id = :project_id GROUP BY project_id\"\"\"\n" }, { "line_no": 5, "char_start": 237, "char_end": 306, "line": " result = db.engine.execute(text(sql), project_id=project_id)\n" } ] }
{ "deleted": [ { "char_start": 202, "char_end": 205, "chars": "{0}" }, { "char_start": 228, "char_end": 247, "chars": ".format(project_id)" } ], "added": [ { "char_start": 202, "char_end": 213, "chars": ":project_id" }, { "char_start": 272, "char_end": 277, "chars": "text(" }, { "char_start": 280, "char_end": 304, "chars": "), project_id=project_id" } ] }
github.com/hotosm/tasking-manager/commit/dee040a2d22b3c4d5e38e2dbf8c6b651ad4c241a
server/models/postgis/task.py
cwe-089
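Here the string `.format()` call is replaced by a SQLAlchemy `text()` clause with a `:project_id` bind parameter. A standalone sketch of the same construct with an explicit connection (the engine URL is a placeholder for the example):

```python
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///:memory:")  # placeholder URL for the sketch

def get_max_task_id(project_id: int):
    sql = text(
        "SELECT max(id) FROM tasks "
        "WHERE project_id = :project_id GROUP BY project_id"
    )
    with engine.connect() as conn:
        # Bind parameters are supplied as a mapping; SQLAlchemy hands them to
        # the driver instead of splicing them into the SQL text.
        row = conn.execute(sql, {"project_id": project_id}).first()
    return row[0] if row else None
```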
findNPC
def findNPC(race, classe, sex,level): c, conn = getConnection() date = now() #select image, SUM(legit) as l FROM npc WHERE race='Elf' AND class='Bard' AND sex='Male' GROUP BY image HAVING l>5 ORDER BY SUM(legit) DESC; c.execute("select image, avg(legit) as l FROM npc WHERE race='"+race+"' AND class='"+classe+"' AND sex='"+sex+"' GROUP BY image HAVING l > 5 ORDER BY SUM(legit) DESC;") conn.commit() out = c.fetchmany(5) conn.close() return out
def findNPC(race, classe, sex,level): c, conn = getConnection() date = now() #select image, SUM(legit) as l FROM npc WHERE race='Elf' AND class='Bard' AND sex='Male' GROUP BY image HAVING l>5 ORDER BY SUM(legit) DESC; c.execute("select image, avg(legit) as l FROM npc WHERE race=(?) AND class=(?) AND sex=(?) GROUP BY image HAVING l > 5 ORDER BY SUM(legit) DESC",(race,classe,sex)) conn.commit() out = c.fetchmany(5) conn.close() return out
{ "deleted": [ { "line_no": 5, "char_start": 221, "char_end": 391, "line": "\tc.execute(\"select image, avg(legit) as l FROM npc WHERE race='\"+race+\"' AND class='\"+classe+\"' AND sex='\"+sex+\"' GROUP BY image HAVING l > 5 ORDER BY SUM(legit) DESC;\")\n" } ], "added": [ { "line_no": 5, "char_start": 221, "char_end": 386, "line": "\tc.execute(\"select image, avg(legit) as l FROM npc WHERE race=(?) AND class=(?) AND sex=(?) GROUP BY image HAVING l > 5 ORDER BY SUM(legit) DESC\",(race,classe,sex))\n" } ] }
{ "deleted": [ { "char_start": 283, "char_end": 293, "chars": "'\"+race+\"'" }, { "char_start": 304, "char_end": 316, "chars": "'\"+classe+\"'" }, { "char_start": 325, "char_end": 334, "chars": "'\"+sex+\"'" }, { "char_start": 387, "char_end": 388, "chars": ";" } ], "added": [ { "char_start": 283, "char_end": 286, "chars": "(?)" }, { "char_start": 297, "char_end": 300, "chars": "(?)" }, { "char_start": 309, "char_end": 312, "chars": "(?)" }, { "char_start": 366, "char_end": 384, "chars": ",(race,classe,sex)" } ] }
github.com/DangerBlack/DungeonsAndDragonsMasterBot/commit/63f980c6dff746f5fcf3005d0646b6c24f81cdc0
database.py
cwe-089
edit
@mod.route('/edit/<int:msg_id>', methods=['GET', 'POST']) def edit(msg_id): m = None if request.method == 'GET': sql = "SELECT * FROM message where msg_id = %d;" % (msg_id) cursor.execute(sql) m = cursor.fetchone() return render_template('message/edit.html', m=m, msg_id=msg_id) if request.method == 'POST': content = request.form['content'] sql = "UPDATE message SET content = '%s' where msg_id = '%d';" \ % (content, msg_id) cursor.execute(sql) conn.commit() flash('Edit Success!') return redirect(url_for('show_entries')) return render_template('message/edit.html', m=m, msg_id=msg_id)
@mod.route('/edit/<int:msg_id>', methods=['GET', 'POST']) def edit(msg_id): m = None if request.method == 'GET': cursor.execute("SELECT * FROM message where msg_id = %s;", (msg_id,)) m = cursor.fetchone() return render_template('message/edit.html', m=m, msg_id=msg_id) if request.method == 'POST': content = request.form['content'] cursor.execute("UPDATE message SET content = %s where msg_id = %s;", (content, msg_id)) conn.commit() flash('Edit Success!') return redirect(url_for('show_entries')) return render_template('message/edit.html', m=m, msg_id=msg_id)
{ "deleted": [ { "line_no": 5, "char_start": 121, "char_end": 189, "line": " sql = \"SELECT * FROM message where msg_id = %d;\" % (msg_id)\n" }, { "line_no": 6, "char_start": 189, "char_end": 217, "line": " cursor.execute(sql)\n" }, { "line_no": 12, "char_start": 395, "char_end": 468, "line": " sql = \"UPDATE message SET content = '%s' where msg_id = '%d';\" \\\n" }, { "line_no": 13, "char_start": 468, "char_end": 500, "line": " % (content, msg_id)\n" }, { "line_no": 14, "char_start": 500, "char_end": 528, "line": " cursor.execute(sql)\n" } ], "added": [ { "line_no": 5, "char_start": 121, "char_end": 199, "line": " cursor.execute(\"SELECT * FROM message where msg_id = %s;\", (msg_id,))\n" }, { "line_no": 11, "char_start": 377, "char_end": 473, "line": " cursor.execute(\"UPDATE message SET content = %s where msg_id = %s;\", (content, msg_id))\n" } ] }
{ "deleted": [ { "char_start": 130, "char_end": 135, "chars": "ql = " }, { "char_start": 174, "char_end": 175, "chars": "d" }, { "char_start": 177, "char_end": 179, "chars": " %" }, { "char_start": 188, "char_end": 215, "chars": "\n cursor.execute(sql" }, { "char_start": 404, "char_end": 409, "chars": "ql = " }, { "char_start": 439, "char_end": 440, "chars": "'" }, { "char_start": 442, "char_end": 443, "chars": "'" }, { "char_start": 459, "char_end": 460, "chars": "'" }, { "char_start": 461, "char_end": 463, "chars": "d'" }, { "char_start": 465, "char_end": 481, "chars": " \\\n %" }, { "char_start": 499, "char_end": 526, "chars": "\n cursor.execute(sql" } ], "added": [ { "char_start": 129, "char_end": 132, "chars": "cur" }, { "char_start": 133, "char_end": 144, "chars": "or.execute(" }, { "char_start": 183, "char_end": 184, "chars": "s" }, { "char_start": 186, "char_end": 187, "chars": "," }, { "char_start": 195, "char_end": 196, "chars": "," }, { "char_start": 385, "char_end": 388, "chars": "cur" }, { "char_start": 389, "char_end": 400, "chars": "or.execute(" }, { "char_start": 449, "char_end": 450, "chars": "s" }, { "char_start": 452, "char_end": 453, "chars": "," } ] }
github.com/ulyssetsd/bjtu-sql/commit/17d7b21864b72ba5666f15236474a93268b32ec9
flaskr/flaskr/views/message.py
cwe-089
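A detail visible in this row: the vulnerable version used `%d` as a Python format code for the integer id, but once the query is parameterized the placeholder is `%s` for every column type; the driver decides how to adapt each Python value. A short sketch in the same `%s` paramstyle (cursor and connection objects assumed):

```python
def update_message(cursor, conn, msg_id: int, content: str):
    # `%s` covers both the string and the integer: it is a driver placeholder,
    # not a Python string-format code, so `%d` is never needed here.
    cursor.execute(
        "UPDATE message SET content = %s WHERE msg_id = %s;",
        (content, msg_id),
    )
    conn.commit()
```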
add_input
def add_input(self,data): connection = self.connect() try: query = "INSERT INTO crimes (description) VALUES ('{}');".format(data) with connection.cursor() as cursor: cursor.execute(query) connection.commit() finally: connection.close()
def add_input(self,data): connection = self.connect() try: query = "INSERT INTO crimes (description) VALUES (%s);" with connection.cursor() as cursor: cursor.execute(query,data) connection.commit() finally: connection.close()
{ "deleted": [ { "line_no": 5, "char_start": 80, "char_end": 163, "line": " query = \"INSERT INTO crimes (description) VALUES ('{}');\".format(data)\n" }, { "line_no": 7, "char_start": 211, "char_end": 249, "line": " cursor.execute(query)\n" } ], "added": [ { "line_no": 5, "char_start": 80, "char_end": 148, "line": " query = \"INSERT INTO crimes (description) VALUES (%s);\"\n" }, { "line_no": 7, "char_start": 196, "char_end": 239, "line": " cursor.execute(query,data)\n" } ] }
{ "deleted": [ { "char_start": 142, "char_end": 146, "chars": "'{}'" }, { "char_start": 149, "char_end": 162, "chars": ".format(data)" } ], "added": [ { "char_start": 142, "char_end": 144, "chars": "%s" }, { "char_start": 232, "char_end": 237, "chars": ",data" } ] }
github.com/sgnab/crime-map-app/commit/209b23bad13594c9cdf18d8788fcba7c8f68d37b
dbhelper.py
cwe-089
fetch_resultSet
def fetch_resultSet(self, session, id): self._openContainer(session) sid = str(id) if (self.idNormalizer is not None): sid = self.idNormalizer.process_string(session, sid) query = ("SELECT class, data FROM %s WHERE identifier = '%s';" % (self.table, sid) ) res = self._query(query) try: rdict = res.dictresult()[0] except IndexError: raise ObjectDoesNotExistException('%s/%s' % (self.id, sid)) data = rdict['data'] try: ndata = pg.unescape_bytea(data) except: # Insufficient PyGreSQL version ndata = data.replace("\\'", "'") ndata = ndata.replace('\\000', '\x00') ndata = ndata.replace('\\012', '\n') # data is res.dictresult() cl = rdict['class'] rset = dynamic.buildObject(session, cl, [[]]) rset.deserialize(session, ndata) rset.id = id # Update expires now = time.time() nowStr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now)) expires = now + self.get_default(session, 'expires', 600) rset.timeExpires = expires expiresStr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(expires)) query = ("UPDATE %s SET timeAccessed = '%s', expires = '%s' " "WHERE identifier = '%s';" % (self.table, nowStr, expiresStr, sid) ) self._query(query) return rset
def fetch_resultSet(self, session, id): self._openContainer(session) sid = str(id) if (self.idNormalizer is not None): sid = self.idNormalizer.process_string(session, sid) query = ("SELECT class, data FROM %s WHERE identifier = $1;" % (self.table) ) res = self._query(query, sid) try: rdict = res.dictresult()[0] except IndexError: raise ObjectDoesNotExistException('%s/%s' % (self.id, sid)) data = rdict['data'] try: ndata = pg.unescape_bytea(data) except: # Insufficient PyGreSQL version ndata = data.replace("\\'", "'") ndata = ndata.replace('\\000', '\x00') ndata = ndata.replace('\\012', '\n') # data is res.dictresult() cl = rdict['class'] rset = dynamic.buildObject(session, cl, [[]]) rset.deserialize(session, ndata) rset.id = id # Update expires now = time.time() nowStr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now)) expires = now + self.get_default(session, 'expires', 600) rset.timeExpires = expires expiresStr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(expires)) query = ("UPDATE %s SET timeAccessed = $1, expires = $2 " "WHERE identifier = $3;" % (self.table) ) self._query(query, nowStr, expiresStr, sid) return rset
{ "deleted": [ { "line_no": 7, "char_start": 213, "char_end": 286, "line": " query = (\"SELECT class, data FROM %s WHERE identifier = '%s';\" %\n" }, { "line_no": 8, "char_start": 286, "char_end": 321, "line": " (self.table, sid)\n" }, { "line_no": 10, "char_start": 340, "char_end": 373, "line": " res = self._query(query)\n" }, { "line_no": 38, "char_start": 1291, "char_end": 1361, "line": " query = (\"UPDATE %s SET timeAccessed = '%s', expires = '%s' \"\n" }, { "line_no": 39, "char_start": 1361, "char_end": 1407, "line": " \"WHERE identifier = '%s';\" %\n" }, { "line_no": 40, "char_start": 1407, "char_end": 1462, "line": " (self.table, nowStr, expiresStr, sid)\n" }, { "line_no": 42, "char_start": 1481, "char_end": 1508, "line": " self._query(query)\n" } ], "added": [ { "line_no": 7, "char_start": 213, "char_end": 284, "line": " query = (\"SELECT class, data FROM %s WHERE identifier = $1;\" %\n" }, { "line_no": 8, "char_start": 284, "char_end": 314, "line": " (self.table)\n" }, { "line_no": 10, "char_start": 333, "char_end": 371, "line": " res = self._query(query, sid)\n" }, { "line_no": 38, "char_start": 1289, "char_end": 1355, "line": " query = (\"UPDATE %s SET timeAccessed = $1, expires = $2 \"\n" }, { "line_no": 39, "char_start": 1355, "char_end": 1412, "line": " \"WHERE identifier = $3;\" % (self.table)\n" }, { "line_no": 41, "char_start": 1431, "char_end": 1483, "line": " self._query(query, nowStr, expiresStr, sid)\n" } ] }
{ "deleted": [ { "char_start": 277, "char_end": 281, "chars": "'%s'" }, { "char_start": 314, "char_end": 319, "chars": ", sid" }, { "char_start": 1338, "char_end": 1342, "chars": "'%s'" }, { "char_start": 1354, "char_end": 1358, "chars": "'%s'" }, { "char_start": 1398, "char_end": 1402, "chars": "'%s'" }, { "char_start": 1406, "char_end": 1423, "chars": "\n " }, { "char_start": 1435, "char_end": 1460, "chars": ", nowStr, expiresStr, sid" } ], "added": [ { "char_start": 277, "char_end": 279, "chars": "$1" }, { "char_start": 364, "char_end": 369, "chars": ", sid" }, { "char_start": 1336, "char_end": 1338, "chars": "$1" }, { "char_start": 1350, "char_end": 1352, "chars": "$2" }, { "char_start": 1392, "char_end": 1394, "chars": "$3" }, { "char_start": 1456, "char_end": 1481, "chars": ", nowStr, expiresStr, sid" } ] }
github.com/cheshire3/cheshire3/commit/d350363b4ea10f102c24c8f26d7b76b006323e8e
cheshire3/sql/resultSetStore.py
cwe-089
init_user
def init_user(username, chat_id): conn = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\users\\" + username + '.db') conn2 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\cf.db') cursor = conn.cursor() cursor2 = conn2.cursor() cursor.execute("CREATE TABLE result (problem INTEGER, diff STRING, verdict STRING)") cursor2.execute("SELECT * FROM problems") x = cursor2.fetchone() while x != None: cursor.execute("insert into result values (?, ?, ? )", (x[0], x[1], "NULL")) x = cursor2.fetchone() url = 'http://codeforces.com/submissions/' + username r = requests.get(url) max_page = 1 soup = BeautifulSoup(r.text, "lxml") for link in soup.find_all(attrs={"class": "page-index"}): s = link.find('a') s2 = s.get("href").split('/') max_page = max(max_page, int(s2[4])) old = "" r = requests.get('http://codeforces.com/submissions/' + username + '/page/0') soup = BeautifulSoup(r.text, "lxml") last_try = soup.find(attrs={"class":"status-small"}) if not last_try == None: last_try = str(last_try).split() last_try = str(last_try[2]) + str(last_try[3]) for i in range(1, max_page + 1): r = requests.get('http://codeforces.com/submissions/' + username + '/page/' + str(i)) soup = BeautifulSoup(r.text, "lxml") count = 0 ver = soup.find_all(attrs={"class": "submissionVerdictWrapper"}) for link in soup.find_all('a'): s = link.get('href') if s != None and s.find('/problemset') != -1: s = s.split('/') if len(s) == 5: s2 = str(ver[count]).split() s2 = s2[5].split('\"') count += 1 cursor.execute("select * from result where problem = '" + s[3] + "'and diff = '" + s[4] + "'") x = cursor.fetchone() if s2[1] == 'OK' and x != None: cursor.execute("update result set verdict = '" + s2[1] + "' where problem = '" + s[3] + "' and diff = '" + s[4] + "'") if x != None and x[2] != 'OK': cursor.execute("update result set verdict = '" + s2[1] +"' where problem = '" + s[3] + "' and diff = '" + s[4] + "'") conn.commit() conn.close() conn2.close() settings = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\settings.db") conn = settings.cursor() conn.execute("select * from last_update_problemset") last_problem = conn.fetchone() conn.execute("select * from users where chat_id = '" + str(chat_id) + "'") x = conn.fetchone() if x == None: conn.execute("insert into users values (?, ?, ?, ?, ?)", (chat_id, username, str(last_try), str(last_problem[0]), 1)) else: conn.execute("update users set username = '" + str(username) + "' where chat_id = '" + str(chat_id) + "'") conn.execute("update users set last_update = '" + str(last_try) + "' where chat_id = '" + str(chat_id) + "'") conn.execute("update users set last_problem = '" + str(last_problem[0]) + "' where chat_id = '" + str(chat_id) + "'") conn.execute("update users set state = '" + str(1) + "' where chat_id = '" + str(chat_id) + "'") settings.commit() settings.close()
def init_user(username, chat_id): conn = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\users\\" + username + '.db') conn2 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\cf.db') cursor = conn.cursor() cursor2 = conn2.cursor() cursor.execute("CREATE TABLE result (problem INTEGER, diff STRING, verdict STRING)") cursor2.execute("SELECT * FROM problems") x = cursor2.fetchone() while x != None: cursor.execute("insert into result values (?, ?, ? )", (x[0], x[1], "NULL")) x = cursor2.fetchone() url = 'http://codeforces.com/submissions/' + username r = requests.get(url) max_page = 1 soup = BeautifulSoup(r.text, "lxml") for link in soup.find_all(attrs={"class": "page-index"}): s = link.find('a') s2 = s.get("href").split('/') max_page = max(max_page, int(s2[4])) r = requests.get('http://codeforces.com/submissions/' + username + '/page/0') soup = BeautifulSoup(r.text, "lxml") last_try = soup.find(attrs={"class":"status-small"}) if not last_try == None: last_try = str(last_try).split() last_try = str(last_try[2]) + str(last_try[3]) for i in range(1, max_page + 1): r = requests.get('http://codeforces.com/submissions/' + username + '/page/' + str(i)) soup = BeautifulSoup(r.text, "lxml") count = 0 ver = soup.find_all(attrs={"class": "submissionVerdictWrapper"}) for link in soup.find_all('a'): s = link.get('href') if s != None and s.find('/problemset') != -1: s = s.split('/') if len(s) == 5: s2 = str(ver[count]).split() s2 = s2[5].split('\"') count += 1 cursor.execute("select * from result where problem = ? and diff = ?", (s[3], s[4])) x = cursor.fetchone() if s2[1] == 'OK' and x != None: cursor.execute("update result set verdict = ? where problem = ? and diff = ?", (s2[1], s[3], s[4])) if x != None and x[2] != 'OK': cursor.execute("update result set verdict = ? where problem = ? and diff = ?", (s2[1], s[3], s[4])) conn.commit() conn.close() conn2.close() settings = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\settings.db") conn = settings.cursor() conn.execute("select * from last_update_problemset") last_problem = conn.fetchone() conn.execute("select * from users where chat_id = ?", (str(chat_id),)) x = conn.fetchone() if x == None: conn.execute("insert into users values (?, ?, ?, ?, ?)", (chat_id, username, str(last_try), str(last_problem[0]), 1)) else: conn.execute("update users set username = ? where chat_id = ?", (str(username), str(chat_id))) conn.execute("update users set last_update = ? where chat_id = ?", (str(last_try), str(chat_id))) conn.execute("update users set last_problem = ? where chat_id = ?", (str(last_problem[0]), str(chat_id))) conn.execute("update users set state = ? where chat_id = ?", (str(1), str(chat_id))) settings.commit() settings.close()
{ "deleted": [ { "line_no": 22, "char_start": 893, "char_end": 894, "line": "\n" }, { "line_no": 23, "char_start": 894, "char_end": 907, "line": " old = \"\"\n" }, { "line_no": 44, "char_start": 1799, "char_end": 1914, "line": " cursor.execute(\"select * from result where problem = '\" + s[3] + \"'and diff = '\" + s[4] + \"'\")\n" }, { "line_no": 47, "char_start": 2008, "char_end": 2151, "line": " cursor.execute(\"update result set verdict = '\" + s2[1] + \"' where problem = '\" + s[3] + \"' and diff = '\" + s[4] + \"'\")\n" }, { "line_no": 49, "char_start": 2202, "char_end": 2344, "line": " cursor.execute(\"update result set verdict = '\" + s2[1] +\"' where problem = '\" + s[3] + \"' and diff = '\" + s[4] + \"'\")\n" }, { "line_no": 50, "char_start": 2344, "char_end": 2345, "line": "\n" }, { "line_no": 54, "char_start": 2398, "char_end": 2399, "line": "\n" }, { "line_no": 59, "char_start": 2613, "char_end": 2692, "line": " conn.execute(\"select * from users where chat_id = '\" + str(chat_id) + \"'\")\n" }, { "line_no": 64, "char_start": 2870, "char_end": 2985, "line": " conn.execute(\"update users set username = '\" + str(username) + \"' where chat_id = '\" + str(chat_id) + \"'\")\n" }, { "line_no": 65, "char_start": 2985, "char_end": 3103, "line": " conn.execute(\"update users set last_update = '\" + str(last_try) + \"' where chat_id = '\" + str(chat_id) + \"'\")\n" }, { "line_no": 66, "char_start": 3103, "char_end": 3229, "line": " conn.execute(\"update users set last_problem = '\" + str(last_problem[0]) + \"' where chat_id = '\" + str(chat_id) + \"'\")\n" }, { "line_no": 67, "char_start": 3229, "char_end": 3334, "line": " conn.execute(\"update users set state = '\" + str(1) + \"' where chat_id = '\" + str(chat_id) + \"'\")\n" } ], "added": [ { "line_no": 42, "char_start": 1785, "char_end": 1889, "line": " cursor.execute(\"select * from result where problem = ? and diff = ?\", (s[3], s[4]))\n" }, { "line_no": 45, "char_start": 1983, "char_end": 2107, "line": " cursor.execute(\"update result set verdict = ? where problem = ? and diff = ?\", (s2[1], s[3], s[4]))\n" }, { "line_no": 47, "char_start": 2158, "char_end": 2282, "line": " cursor.execute(\"update result set verdict = ? where problem = ? and diff = ?\", (s2[1], s[3], s[4]))\n" }, { "line_no": 55, "char_start": 2549, "char_end": 2624, "line": " conn.execute(\"select * from users where chat_id = ?\", (str(chat_id),))\n" }, { "line_no": 60, "char_start": 2802, "char_end": 2905, "line": " conn.execute(\"update users set username = ? where chat_id = ?\", (str(username), str(chat_id)))\n" }, { "line_no": 61, "char_start": 2905, "char_end": 3011, "line": " conn.execute(\"update users set last_update = ? where chat_id = ?\", (str(last_try), str(chat_id)))\n" }, { "line_no": 62, "char_start": 3011, "char_end": 3125, "line": " conn.execute(\"update users set last_problem = ? where chat_id = ?\", (str(last_problem[0]), str(chat_id)))\n" }, { "line_no": 63, "char_start": 3125, "char_end": 3218, "line": " conn.execute(\"update users set state = ? where chat_id = ?\", (str(1), str(chat_id)))\n" } ] }
{ "deleted": [ { "char_start": 893, "char_end": 907, "chars": "\n old = \"\"\n" }, { "char_start": 1872, "char_end": 1883, "chars": "'\" + s[3] +" }, { "char_start": 1884, "char_end": 1886, "chars": "\"'" }, { "char_start": 1897, "char_end": 1898, "chars": "'" }, { "char_start": 1900, "char_end": 1901, "chars": "+" }, { "char_start": 1906, "char_end": 1912, "chars": " + \"'\"" }, { "char_start": 2076, "char_end": 2091, "chars": "'\" + s2[1] + \"'" }, { "char_start": 2108, "char_end": 2122, "chars": "'\" + s[3] + \"'" }, { "char_start": 2134, "char_end": 2135, "chars": "'" }, { "char_start": 2137, "char_end": 2138, "chars": "+" }, { "char_start": 2143, "char_end": 2149, "chars": " + \"'\"" }, { "char_start": 2270, "char_end": 2284, "chars": "'\" + s2[1] +\"'" }, { "char_start": 2301, "char_end": 2315, "chars": "'\" + s[3] + \"'" }, { "char_start": 2327, "char_end": 2328, "chars": "'" }, { "char_start": 2330, "char_end": 2331, "chars": "+" }, { "char_start": 2336, "char_end": 2342, "chars": " + \"'\"" }, { "char_start": 2343, "char_end": 2344, "chars": "\n" }, { "char_start": 2398, "char_end": 2399, "chars": "\n" }, { "char_start": 2667, "char_end": 2668, "chars": "'" }, { "char_start": 2669, "char_end": 2671, "chars": " +" }, { "char_start": 2684, "char_end": 2690, "chars": " + \"'\"" }, { "char_start": 2920, "char_end": 2943, "chars": "'\" + str(username) + \"'" }, { "char_start": 2960, "char_end": 2961, "chars": "'" }, { "char_start": 2963, "char_end": 2964, "chars": "+" }, { "char_start": 2977, "char_end": 2983, "chars": " + \"'\"" }, { "char_start": 3038, "char_end": 3061, "chars": "'\" + str(last_try) + \"'" }, { "char_start": 3078, "char_end": 3079, "chars": "'" }, { "char_start": 3081, "char_end": 3082, "chars": "+" }, { "char_start": 3095, "char_end": 3101, "chars": " + \"'\"" }, { "char_start": 3157, "char_end": 3158, "chars": "'" }, { "char_start": 3159, "char_end": 3161, "chars": " +" }, { "char_start": 3182, "char_end": 3208, "chars": " + \"' where chat_id = '\" +" }, { "char_start": 3221, "char_end": 3227, "chars": " + \"'\"" }, { "char_start": 3276, "char_end": 3292, "chars": "'\" + str(1) + \"'" }, { "char_start": 3309, "char_end": 3310, "chars": "'" }, { "char_start": 3312, "char_end": 3313, "chars": "+" }, { "char_start": 3326, "char_end": 3332, "chars": " + \"'\"" } ], "added": [ { "char_start": 1858, "char_end": 1859, "chars": "?" }, { "char_start": 1871, "char_end": 1872, "chars": "?" }, { "char_start": 1873, "char_end": 1874, "chars": "," }, { "char_start": 1875, "char_end": 1881, "chars": "(s[3]," }, { "char_start": 1886, "char_end": 1887, "chars": ")" }, { "char_start": 2051, "char_end": 2052, "chars": "?" }, { "char_start": 2069, "char_end": 2070, "chars": "?" }, { "char_start": 2082, "char_end": 2083, "chars": "?" }, { "char_start": 2084, "char_end": 2085, "chars": "," }, { "char_start": 2086, "char_end": 2087, "chars": "(" }, { "char_start": 2088, "char_end": 2089, "chars": "2" }, { "char_start": 2090, "char_end": 2091, "chars": "1" }, { "char_start": 2092, "char_end": 2093, "chars": "," }, { "char_start": 2094, "char_end": 2099, "chars": "s[3]," }, { "char_start": 2100, "char_end": 2105, "chars": "s[4])" }, { "char_start": 2226, "char_end": 2227, "chars": "?" }, { "char_start": 2244, "char_end": 2245, "chars": "?" }, { "char_start": 2257, "char_end": 2258, "chars": "?" 
}, { "char_start": 2259, "char_end": 2260, "chars": "," }, { "char_start": 2261, "char_end": 2262, "chars": "(" }, { "char_start": 2263, "char_end": 2264, "chars": "2" }, { "char_start": 2265, "char_end": 2266, "chars": "1" }, { "char_start": 2267, "char_end": 2268, "chars": "," }, { "char_start": 2269, "char_end": 2274, "chars": "s[3]," }, { "char_start": 2275, "char_end": 2279, "chars": "s[4]" }, { "char_start": 2280, "char_end": 2281, "chars": ")" }, { "char_start": 2603, "char_end": 2604, "chars": "?" }, { "char_start": 2605, "char_end": 2606, "chars": "," }, { "char_start": 2607, "char_end": 2608, "chars": "(" }, { "char_start": 2620, "char_end": 2622, "chars": ",)" }, { "char_start": 2852, "char_end": 2853, "chars": "?" }, { "char_start": 2870, "char_end": 2871, "chars": "?" }, { "char_start": 2872, "char_end": 2873, "chars": "," }, { "char_start": 2874, "char_end": 2889, "chars": "(str(username)," }, { "char_start": 2902, "char_end": 2903, "chars": ")" }, { "char_start": 2958, "char_end": 2959, "chars": "?" }, { "char_start": 2976, "char_end": 2977, "chars": "?" }, { "char_start": 2978, "char_end": 2979, "chars": "," }, { "char_start": 2980, "char_end": 2995, "chars": "(str(last_try)," }, { "char_start": 3008, "char_end": 3009, "chars": ")" }, { "char_start": 3065, "char_end": 3084, "chars": "? where chat_id = ?" }, { "char_start": 3085, "char_end": 3086, "chars": "," }, { "char_start": 3087, "char_end": 3088, "chars": "(" }, { "char_start": 3108, "char_end": 3109, "chars": "," }, { "char_start": 3122, "char_end": 3123, "chars": ")" }, { "char_start": 3172, "char_end": 3173, "chars": "?" }, { "char_start": 3190, "char_end": 3191, "chars": "?" }, { "char_start": 3192, "char_end": 3193, "chars": "," }, { "char_start": 3194, "char_end": 3202, "chars": "(str(1)," }, { "char_start": 3215, "char_end": 3216, "chars": ")" } ] }
github.com/lissrbay/codeforces_bot/commit/cc7f5143445a0030b1149ac60a65b1b1b9c92a90
bases/createuserbase.py
cwe-089
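The init_user fix above swaps string-concatenated sqlite3 SQL for qmark placeholders with the values supplied separately. A minimal, self-contained sketch of that pattern, using an in-memory database and made-up values rather than anything from the commit:

import sqlite3

# In-memory database stands in for the bot's per-user files.
conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE result (problem INTEGER, diff TEXT, verdict TEXT)")

problem, diff = 1200, "A"
# Unsafe shape: "... problem = '" + str(problem) + "'" splices values into the SQL text.
# Safe shape: the SQL stays constant and sqlite3 binds the values on the side.
cur.execute("SELECT * FROM result WHERE problem = ? AND diff = ?", (problem, diff))
print(cur.fetchall())

cur.execute("INSERT INTO result VALUES (?, ?, ?)", (problem, diff, "OK"))
conn.commit()
conn.close()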
process_vote
def process_vote(target,action,chan,mask,db,notice,conn): if ' ' in target: notice('Invalid nick') return try: votes2kick = database.get(db,'channels','votekick','chan',chan) except: votes2kick = 10 try: votes2ban = database.get(db,'channels','voteban','chan',chan) except: votes2ban = 10 if len(target) is 0: if action is 'kick': notice('Votes required to kick: {}'.format(votes2kick)) elif action is 'ban': notice('Votes required to ban: {}'.format(votes2ban)) return votefinished = False global db_ready if not db_ready: db_init(db) chan = chan.lower() target = target.lower() voter = user.format_hostmask(mask) voters = db.execute("SELECT voters FROM votes where chan='{}' and action='{}' and target like '{}'".format(chan,action,target)).fetchone() if conn.nick.lower() in target: return "I dont think so Tim." if voters: voters = voters[0] if voter in voters: notice("You have already voted.") return else: voters = '{} {}'.format(voters,voter).strip() notice("Thank you for your vote!") else: voters = voter votecount = len(voters.split(' ')) if 'kick' in action: votemax = int(votes2kick) if votecount >= votemax: votefinished = True conn.send("KICK {} {} :{}".format(chan, target, "You have been voted off the island.")) if 'ban' in action: votemax = int(votes2ban) if votecount >= votemax: votefinished = True conn.send("MODE {} +b {}".format(chan, user.get_hostmask(target,db))) conn.send("KICK {} {} :".format(chan, target, "You have been voted off the island.")) if votefinished: db.execute("DELETE FROM votes where chan='{}' and action='{}' and target like '{}'".format(chan,action,target)) else: db.execute("insert or replace into votes(chan, action, target, voters, time) values(?,?,?,?,?)", (chan, action, target, voters, time.time())) db.commit() return ("Votes to {} {}: {}/{}".format(action, target, votecount,votemax))
def process_vote(target,action,chan,mask,db,notice,conn): if ' ' in target: notice('Invalid nick') return try: votes2kick = database.get(db,'channels','votekick','chan',chan) except: votes2kick = 10 try: votes2ban = database.get(db,'channels','voteban','chan',chan) except: votes2ban = 10 if len(target) is 0: if action is 'kick': notice('Votes required to kick: {}'.format(votes2kick)) elif action is 'ban': notice('Votes required to ban: {}'.format(votes2ban)) return votefinished = False global db_ready if not db_ready: db_init(db) chan = chan.lower() target = target.lower() voter = user.format_hostmask(mask) voters = db.execute("SELECT voters FROM votes where chan=? and action=? and target like ?", chan, action, target).fetchone() if conn.nick.lower() in target: return "I dont think so Tim." if voters: voters = voters[0] if voter in voters: notice("You have already voted.") return else: voters = '{} {}'.format(voters,voter).strip() notice("Thank you for your vote!") else: voters = voter votecount = len(voters.split(' ')) if 'kick' in action: votemax = int(votes2kick) if votecount >= votemax: votefinished = True conn.send("KICK {} {} :{}".format(chan, target, "You have been voted off the island.")) if 'ban' in action: votemax = int(votes2ban) if votecount >= votemax: votefinished = True conn.send("MODE {} +b {}".format(chan, user.get_hostmask(target,db))) conn.send("KICK {} {} :".format(chan, target, "You have been voted off the island.")) if votefinished: db.execute("DELETE FROM votes where chan=? and action=? and target like ?", chan, action, target) else: db.execute("insert or replace into votes(chan, action, target, voters, time) values(?,?,?,?,?)", (chan, action, target, voters, time.time())) db.commit() return ("Votes to {} {}: {}/{}".format(action, target, votecount,votemax))
{ "deleted": [ { "line_no": 22, "char_start": 707, "char_end": 850, "line": " voters = db.execute(\"SELECT voters FROM votes where chan='{}' and action='{}' and target like '{}'\".format(chan,action,target)).fetchone()\n" }, { "line_no": 51, "char_start": 1781, "char_end": 1914, "line": " if votefinished: db.execute(\"DELETE FROM votes where chan='{}' and action='{}' and target like '{}'\".format(chan,action,target))\n" } ], "added": [ { "line_no": 22, "char_start": 707, "char_end": 836, "line": " voters = db.execute(\"SELECT voters FROM votes where chan=? and action=? and target like ?\", chan, action, target).fetchone()\n" }, { "line_no": 51, "char_start": 1767, "char_end": 1886, "line": " if votefinished: db.execute(\"DELETE FROM votes where chan=? and action=? and target like ?\", chan, action, target)\n" } ] }
{ "deleted": [ { "char_start": 768, "char_end": 772, "chars": "'{}'" }, { "char_start": 784, "char_end": 788, "chars": "'{}'" }, { "char_start": 805, "char_end": 809, "chars": "'{}'" }, { "char_start": 810, "char_end": 818, "chars": ".format(" }, { "char_start": 836, "char_end": 837, "chars": ")" }, { "char_start": 1843, "char_end": 1847, "chars": "'{}'" }, { "char_start": 1859, "char_end": 1863, "chars": "'{}'" }, { "char_start": 1880, "char_end": 1884, "chars": "'{}'" }, { "char_start": 1885, "char_end": 1893, "chars": ".format(" }, { "char_start": 1911, "char_end": 1912, "chars": ")" } ], "added": [ { "char_start": 768, "char_end": 769, "chars": "?" }, { "char_start": 781, "char_end": 782, "chars": "?" }, { "char_start": 799, "char_end": 800, "chars": "?" }, { "char_start": 801, "char_end": 803, "chars": ", " }, { "char_start": 808, "char_end": 809, "chars": " " }, { "char_start": 816, "char_end": 817, "chars": " " }, { "char_start": 1829, "char_end": 1830, "chars": "?" }, { "char_start": 1842, "char_end": 1843, "chars": "?" }, { "char_start": 1860, "char_end": 1861, "chars": "?" }, { "char_start": 1862, "char_end": 1864, "chars": ", " }, { "char_start": 1869, "char_end": 1870, "chars": " " }, { "char_start": 1877, "char_end": 1878, "chars": " " } ] }
github.com/gstack/uguubot/commit/700ff40be84be88964e61f8ae780564e5862460d
plugins/vote.py
cwe-089
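The process_vote fix above hands chan, action and target to the bot's own db.execute wrapper as separate arguments. With the standard library sqlite3 module the equivalent call takes all bind values as a single sequence; a sketch of that form, with a hypothetical votes table and made-up rows:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE votes (chan TEXT, action TEXT, target TEXT, voters TEXT, time REAL)")
conn.execute("INSERT INTO votes VALUES (?, ?, ?, ?, ?)",
             ("#chan", "kick", "someuser", "a!a@host", 0.0))

chan, action, target = "#chan", "kick", "someuser"
# With plain sqlite3, the bind values travel in ONE tuple or list,
# not as separate positional arguments after the SQL string.
row = conn.execute(
    "SELECT voters FROM votes WHERE chan=? AND action=? AND target LIKE ?",
    (chan, action, target),
).fetchone()
print(row)   # ('a!a@host',)
conn.close()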
like
@mod.route('/like/<int:msg_id>', methods=['GET', 'POST']) def like(msg_id): if request.method == 'GET': user_id = session['logged_id'] c_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S') sql = "INSERT INTO like_msg(msg_id, user_id,c_time) " + \ "VALUES(%d,'%s','%s');" % (msg_id, user_id, c_time) cursor.execute(sql) conn.commit() return redirect(url_for('show_entries'))
@mod.route('/like/<int:msg_id>', methods=['GET', 'POST']) def like(msg_id): if request.method == 'GET': user_id = session['logged_id'] c_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S') cursor.execute("INSERT INTO like_msg(msg_id, user_id,c_time) VALUES(%s,%s,%s);", (msg_id, user_id, c_time)) conn.commit() return redirect(url_for('show_entries'))
{ "deleted": [ { "line_no": 6, "char_start": 209, "char_end": 275, "line": " sql = \"INSERT INTO like_msg(msg_id, user_id,c_time) \" + \\\n" }, { "line_no": 7, "char_start": 275, "char_end": 343, "line": " \"VALUES(%d,'%s','%s');\" % (msg_id, user_id, c_time)\n" }, { "line_no": 8, "char_start": 343, "char_end": 371, "line": " cursor.execute(sql)\n" } ], "added": [ { "line_no": 6, "char_start": 209, "char_end": 325, "line": " cursor.execute(\"INSERT INTO like_msg(msg_id, user_id,c_time) VALUES(%s,%s,%s);\", (msg_id, user_id, c_time))\n" } ] }
{ "deleted": [ { "char_start": 218, "char_end": 223, "chars": "ql = " }, { "char_start": 269, "char_end": 292, "chars": "\" + \\\n \"" }, { "char_start": 300, "char_end": 301, "chars": "d" }, { "char_start": 302, "char_end": 303, "chars": "'" }, { "char_start": 305, "char_end": 306, "chars": "'" }, { "char_start": 307, "char_end": 308, "chars": "'" }, { "char_start": 310, "char_end": 311, "chars": "'" }, { "char_start": 314, "char_end": 316, "chars": " %" }, { "char_start": 342, "char_end": 369, "chars": "\n cursor.execute(sql" } ], "added": [ { "char_start": 217, "char_end": 220, "chars": "cur" }, { "char_start": 221, "char_end": 232, "chars": "or.execute(" }, { "char_start": 286, "char_end": 287, "chars": "s" }, { "char_start": 296, "char_end": 297, "chars": "," } ] }
github.com/ulyssetsd/bjtu-sql/commit/17d7b21864b72ba5666f15236474a93268b32ec9
flaskr/flaskr/views/like_msg.py
cwe-089
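The like_msg fix above moves from %-string formatting (with %d and quoted '%s') to driver-level %s bind markers plus a parameter tuple. The record does not show which driver backs cursor; assuming a DB-API driver with the "format" paramstyle such as psycopg2, the pattern looks like the sketch below. The DSN is hypothetical and the like_msg table is assumed to already exist.

from datetime import datetime
import psycopg2  # any "format"-paramstyle DB-API driver follows the same pattern

conn = psycopg2.connect("dbname=app user=app password=secret host=localhost")  # hypothetical DSN
cur = conn.cursor()

msg_id = 42                      # int
user_id = "alice"                # str
c_time = datetime.now()          # datetime
# %s here is a bind marker for the driver, not Python string formatting:
# it is used for ints and datetimes as well, never %d, and never quoted.
cur.execute(
    "INSERT INTO like_msg (msg_id, user_id, c_time) VALUES (%s, %s, %s)",
    (msg_id, user_id, c_time),
)
conn.commit()
cur.close()
conn.close()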
tag_to_tag_num
def tag_to_tag_num(self, tag): ''' Returns tag_num given tag. ''' q = "SELECT rowid FROM tags WHERE tag = '" + tag + "'" self.query(q) return self.c.fetchone()[0]
def tag_to_tag_num(self, tag): ''' Returns tag_num given tag. ''' q = "SELECT rowid FROM tags WHERE tag = ?" self.query(q, tag) return self.c.fetchone()[0]
{ "deleted": [ { "line_no": 4, "char_start": 79, "char_end": 142, "line": " q = \"SELECT rowid FROM tags WHERE tag = '\" + tag + \"'\"\n" }, { "line_no": 5, "char_start": 142, "char_end": 164, "line": " self.query(q)\n" } ], "added": [ { "line_no": 4, "char_start": 79, "char_end": 130, "line": " q = \"SELECT rowid FROM tags WHERE tag = ?\"\n" }, { "line_no": 5, "char_start": 130, "char_end": 157, "line": " self.query(q, tag)\n" } ] }
{ "deleted": [ { "char_start": 127, "char_end": 140, "chars": "'\" + tag + \"'" } ], "added": [ { "char_start": 127, "char_end": 128, "chars": "?" }, { "char_start": 150, "char_end": 155, "chars": ", tag" } ] }
github.com/pukkapies/urop2019/commit/3ca2e2c291d2d5fe262d20a8e0520bdfb622432b
modules/query_lastfm.py
cwe-089
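The tag_to_tag_num fix forwards the bound value through the class's own query helper. Purely as an illustration, here is a hypothetical wrapper (not the repository's class) that accepts either a lone scalar or a sequence and hands sqlite3 the sequence it expects:

import sqlite3

class TagDB:
    """Hypothetical wrapper showing a query helper that forwards bind parameters."""

    def __init__(self, path=":memory:"):
        self.conn = sqlite3.connect(path)
        self.c = self.conn.cursor()
        self.c.execute("CREATE TABLE IF NOT EXISTS tags (tag TEXT)")

    def query(self, sql, params=()):
        # Accept a single scalar or a sequence; sqlite3 itself wants a sequence/dict.
        if not isinstance(params, (tuple, list, dict)):
            params = (params,)
        self.c.execute(sql, params)

    def tag_to_tag_num(self, tag):
        self.query("SELECT rowid FROM tags WHERE tag = ?", tag)
        row = self.c.fetchone()
        return row[0] if row else None

db = TagDB()
db.query("INSERT INTO tags VALUES (?)", "rock")
print(db.tag_to_tag_num("rock"))   # 1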
update_history_and_sourcebyinstitution
def update_history_and_sourcebyinstitution(conn, sqlite, k10plus, ai): """ Get all current sources and title numbers from Solr and log them into database. """ current_sources = get_all_current_sources(k10plus, ai) current_institutions = get_all_current_institutions(k10plus, ai) old_sourcebyinstitutions = get_all_old_sourcebyinstitutions(conn, sqlite) current_sourcebyinstitutions = [] for source in current_sources: for institution in current_institutions: if not institution or institution == " " or '"' in institution: continue sourcebyinstitution = "SID " + str(source) + " (" + institution + ")" current_sourcebyinstitutions.append(sourcebyinstitution) params = { "q": 'source_id:%s AND institution:"%s"' % (source, institution), "rows": 0, "wt": "json" } # check k10plus result = get_solr_result(k10plus, params) number = result["response"]["numFound"] if number != 0: sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES ("%s", %s)' % (sourcebyinstitution, number) sqlite.execute(sql) conn.commit() else: # check ai result = get_solr_result(ai, params) number = result["response"]["numFound"] if number != 0: # TODO: escape via sqlite sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES ("%s", %s)' % (sourcebyinstitution, number) sqlite.execute(sql) conn.commit() if sourcebyinstitution not in old_sourcebyinstitutions: logging.info("The %s is now connected to SID %s.", institution, source) sql = "INSERT INTO sourcebyinstitution (sourcebyinstitution) VALUES ('%s')" % sourcebyinstitution sqlite.execute(sql) conn.commit() if number != 0: old_sourcebyinstitution_number = get_old_sourcebyinstitution_number(conn, sqlite, sourcebyinstitution) if number < old_sourcebyinstitution_number: message = "Die Anzahl der Titel hat sich bei %s gegenueber einem frueheren Import verringert." % (sourcebyinstitution) send_message(message) # requests.exceptions.ConnectionError: HTTPConnectionPool(XXXXXX): Max retries exceeded time.sleep(0.25) for old_sourcebyinstitution in old_sourcebyinstitutions: if old_sourcebyinstitution not in current_sourcebyinstitutions: message = "Die %s ist nicht laenger für die SID %s angesigelt." % (institution, source) send_message(message)
def update_history_and_sourcebyinstitution(conn, sqlite, k10plus, ai): """ Get all current sources and title numbers from Solr and log them into database. """ current_sources = get_all_current_sources(k10plus, ai) current_institutions = get_all_current_institutions(k10plus, ai) old_sourcebyinstitutions = get_all_old_sourcebyinstitutions(conn, sqlite) current_sourcebyinstitutions = [] for source in current_sources: for institution in current_institutions: if not institution or institution == " " or '"' in institution: continue sourcebyinstitution = "SID " + str(source) + " (" + institution + ")" current_sourcebyinstitutions.append(sourcebyinstitution) params = { "q": 'source_id:%s AND institution:"%s"' % (source, institution), "rows": 0, "wt": "json" } # check k10plus result = get_solr_result(k10plus, params) number = result["response"]["numFound"] if number != 0: sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES (?, ?)' sqlite.execute(sql, (sourcebyinstitution, number)) conn.commit() else: # check ai result = get_solr_result(ai, params) number = result["response"]["numFound"] if number != 0: # TODO: escape via sqlite sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES (?, ?)' sqlite.execute(sql, (sourcebyinstitution, number)) conn.commit() if sourcebyinstitution not in old_sourcebyinstitutions: logging.info("The %s is now connected to SID %s.", institution, source) sql = "INSERT INTO sourcebyinstitution (sourcebyinstitution) VALUES (?)" sqlite.execute(sql, (sourcebyinstitution)) conn.commit() if number != 0: old_sourcebyinstitution_number = get_old_sourcebyinstitution_number(conn, sqlite, sourcebyinstitution) if number < old_sourcebyinstitution_number: message = "Die Anzahl der Titel hat sich bei %s gegenueber einem frueheren Import verringert." % (sourcebyinstitution) send_message(message) # requests.exceptions.ConnectionError: HTTPConnectionPool(XXXXXX): Max retries exceeded time.sleep(0.25) for old_sourcebyinstitution in old_sourcebyinstitutions: if old_sourcebyinstitution not in current_sourcebyinstitutions: message = "Die %s ist nicht laenger für die SID %s angesigelt." % (institution, source) send_message(message)
{ "deleted": [ { "line_no": 30, "char_start": 1094, "char_end": 1218, "line": " sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES (\"%s\", %s)' % (sourcebyinstitution, number)\n" }, { "line_no": 31, "char_start": 1218, "char_end": 1254, "line": " sqlite.execute(sql)\n" }, { "line_no": 39, "char_start": 1516, "char_end": 1644, "line": " sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES (\"%s\", %s)' % (sourcebyinstitution, number)\n" }, { "line_no": 40, "char_start": 1644, "char_end": 1684, "line": " sqlite.execute(sql)\n" }, { "line_no": 45, "char_start": 1875, "char_end": 1989, "line": " sql = \"INSERT INTO sourcebyinstitution (sourcebyinstitution) VALUES ('%s')\" % sourcebyinstitution\n" }, { "line_no": 46, "char_start": 1989, "char_end": 2025, "line": " sqlite.execute(sql)\n" } ], "added": [ { "line_no": 30, "char_start": 1094, "char_end": 1182, "line": " sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES (?, ?)'\n" }, { "line_no": 31, "char_start": 1182, "char_end": 1249, "line": " sqlite.execute(sql, (sourcebyinstitution, number))\n" }, { "line_no": 39, "char_start": 1511, "char_end": 1603, "line": " sql = 'INSERT INTO history (sourcebyinstitution, titles) VALUES (?, ?)'\n" }, { "line_no": 40, "char_start": 1603, "char_end": 1674, "line": " sqlite.execute(sql, (sourcebyinstitution, number))\n" }, { "line_no": 45, "char_start": 1865, "char_end": 1954, "line": " sql = \"INSERT INTO sourcebyinstitution (sourcebyinstitution) VALUES (?)\"\n" }, { "line_no": 46, "char_start": 1954, "char_end": 2013, "line": " sqlite.execute(sql, (sourcebyinstitution))\n" } ] }
{ "deleted": [ { "char_start": 1175, "char_end": 1179, "chars": "\"%s\"" }, { "char_start": 1181, "char_end": 1183, "chars": "%s" }, { "char_start": 1185, "char_end": 1217, "chars": " % (sourcebyinstitution, number)" }, { "char_start": 1601, "char_end": 1605, "chars": "\"%s\"" }, { "char_start": 1607, "char_end": 1609, "chars": "%s" }, { "char_start": 1611, "char_end": 1643, "chars": " % (sourcebyinstitution, number)" }, { "char_start": 1960, "char_end": 1964, "chars": "'%s'" }, { "char_start": 1966, "char_end": 1988, "chars": " % sourcebyinstitution" } ], "added": [ { "char_start": 1175, "char_end": 1176, "chars": "?" }, { "char_start": 1178, "char_end": 1179, "chars": "?" }, { "char_start": 1216, "char_end": 1247, "chars": ", (sourcebyinstitution, number)" }, { "char_start": 1596, "char_end": 1597, "chars": "?" }, { "char_start": 1599, "char_end": 1600, "chars": "?" }, { "char_start": 1641, "char_end": 1672, "chars": ", (sourcebyinstitution, number)" }, { "char_start": 1950, "char_end": 1951, "chars": "?" }, { "char_start": 1988, "char_end": 2011, "chars": ", (sourcebyinstitution)" } ] }
github.com/miku/siskin/commit/7fa398d2fea72bf2e8b4808f75df4b3d35ae959a
bin/solrcheckup.py
cwe-089
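The update_history_and_sourcebyinstitution fix binds sourcebyinstitution on its own. One detail to watch when writing that pattern with sqlite3: in Python, (value) is just the value, so a single bound string needs the one-element tuple form (value,). A runnable sketch with a made-up value:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE sourcebyinstitution (sourcebyinstitution TEXT)")

name = "SID 28 (DE-15)"   # made-up value
# (name) without a comma is just the string, and sqlite3 would try to bind its
# characters one by one; the trailing comma makes it a one-element tuple.
conn.execute("INSERT INTO sourcebyinstitution (sourcebyinstitution) VALUES (?)", (name,))
conn.commit()
print(conn.execute("SELECT * FROM sourcebyinstitution").fetchall())
conn.close()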
change_message
def change_message(self, new_message, logged_user): update_sql = """ UPDATE Clients SET message = '{}' WHERE client_id = '{}' """.format(new_message, logged_user.get_client_id()) cursor = self.__conn.cursor() cursor.execute(update_sql) self.__conn.commit() logged_user.set_message(new_message)
def change_message(self, new_message, logged_user): update_sql = """ UPDATE Clients SET message = ? WHERE client_id = ? """ cursor = self.__conn.cursor() cursor.execute(update_sql, (new_message, logged_user.get_client_id())) self.__conn.commit() logged_user.set_message(new_message)
{ "deleted": [ { "line_no": 4, "char_start": 108, "char_end": 139, "line": " SET message = '{}'\n" }, { "line_no": 5, "char_start": 139, "char_end": 174, "line": " WHERE client_id = '{}'\n" }, { "line_no": 6, "char_start": 174, "char_end": 235, "line": " \"\"\".format(new_message, logged_user.get_client_id())\n" }, { "line_no": 10, "char_start": 275, "char_end": 310, "line": " cursor.execute(update_sql)\n" } ], "added": [ { "line_no": 4, "char_start": 108, "char_end": 136, "line": " SET message = ?\n" }, { "line_no": 5, "char_start": 136, "char_end": 168, "line": " WHERE client_id = ?\n" }, { "line_no": 6, "char_start": 168, "char_end": 180, "line": " \"\"\"\n" }, { "line_no": 10, "char_start": 220, "char_end": 299, "line": " cursor.execute(update_sql, (new_message, logged_user.get_client_id()))\n" } ] }
{ "deleted": [ { "char_start": 134, "char_end": 138, "chars": "'{}'" }, { "char_start": 169, "char_end": 173, "chars": "'{}'" }, { "char_start": 185, "char_end": 234, "chars": ".format(new_message, logged_user.get_client_id())" } ], "added": [ { "char_start": 134, "char_end": 135, "chars": "?" }, { "char_start": 166, "char_end": 167, "chars": "?" }, { "char_start": 253, "char_end": 297, "chars": ", (new_message, logged_user.get_client_id())" } ] }
github.com/AnetaStoycheva/Programming101_HackBulgaria/commit/c0d6f4b8fe83a375832845a45952b5153e4c34f3
Week_9/sql_manager.py
cwe-089
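The change_message fix keeps the UPDATE text constant and supplies the values at execute time. A side benefit of that shape is that one statement string can serve many parameter sets, for example through executemany; a sketch against a throwaway Clients table:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE Clients (client_id INTEGER, message TEXT)")
conn.executemany("INSERT INTO Clients VALUES (?, ?)", [(1, "hi"), (2, "hello")])

# The statement text is a constant; only the bound values vary per call.
UPDATE_SQL = """
    UPDATE Clients
    SET message = ?
    WHERE client_id = ?
"""
conn.execute(UPDATE_SQL, ("out of office", 1))
# ...and the same constant works for batches:
conn.executemany(UPDATE_SQL, [("back at 5", 1), ("brb", 2)])
conn.commit()
print(conn.execute("SELECT * FROM Clients ORDER BY client_id").fetchall())
conn.close()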
create_event
def create_event(self, title, start_time, time_zone, server_id, description): sql = """INSERT INTO events (title, start_time, time_zone, server_id, description) VALUES ('{0}', '{1}', '{2}', '{3}', '{4}') """.format(title, start_time, time_zone, server_id, description) self.cur.execute(sql) self.conn.commit()
def create_event(self, title, start_time, time_zone, server_id, description): sql = """ INSERT INTO events (title, start_time, time_zone, server_id, description) VALUES (%s, %s, %s, %s, %s) """ self.cur.execute(sql, (title, start_time, time_zone, server_id, description)) self.conn.commit()
{ "deleted": [ { "line_no": 2, "char_start": 82, "char_end": 173, "line": " sql = \"\"\"INSERT INTO events (title, start_time, time_zone, server_id, description)\n" }, { "line_no": 3, "char_start": 173, "char_end": 233, "line": " VALUES ('{0}', '{1}', '{2}', '{3}', '{4}')\n" }, { "line_no": 4, "char_start": 233, "char_end": 315, "line": " \"\"\".format(title, start_time, time_zone, server_id, description)\n" }, { "line_no": 5, "char_start": 315, "char_end": 345, "line": " self.cur.execute(sql)\n" } ], "added": [ { "line_no": 2, "char_start": 82, "char_end": 100, "line": " sql = \"\"\"\n" }, { "line_no": 3, "char_start": 100, "char_end": 188, "line": " INSERT INTO events (title, start_time, time_zone, server_id, description)\n" }, { "line_no": 4, "char_start": 188, "char_end": 230, "line": " VALUES (%s, %s, %s, %s, %s)\n" }, { "line_no": 5, "char_start": 230, "char_end": 248, "line": " \"\"\"\n" }, { "line_no": 6, "char_start": 248, "char_end": 334, "line": " self.cur.execute(sql, (title, start_time, time_zone, server_id, description))\n" } ] }
{ "deleted": [ { "char_start": 187, "char_end": 190, "chars": " " }, { "char_start": 198, "char_end": 203, "chars": "'{0}'" }, { "char_start": 205, "char_end": 210, "chars": "'{1}'" }, { "char_start": 212, "char_end": 217, "chars": "'{2}'" }, { "char_start": 219, "char_end": 224, "chars": "'{3}'" }, { "char_start": 226, "char_end": 231, "chars": "'{4}'" }, { "char_start": 250, "char_end": 253, "chars": "\"\"\"" }, { "char_start": 254, "char_end": 256, "chars": "fo" }, { "char_start": 257, "char_end": 259, "chars": "ma" }, { "char_start": 314, "char_end": 343, "chars": "\n self.cur.execute(sql" } ], "added": [ { "char_start": 99, "char_end": 114, "chars": "\n " }, { "char_start": 210, "char_end": 212, "chars": "%s" }, { "char_start": 214, "char_end": 216, "chars": "%s" }, { "char_start": 218, "char_end": 220, "chars": "%s" }, { "char_start": 222, "char_end": 224, "chars": "%s" }, { "char_start": 226, "char_end": 228, "chars": "%s" }, { "char_start": 247, "char_end": 259, "chars": "\n sel" }, { "char_start": 260, "char_end": 263, "chars": ".cu" }, { "char_start": 264, "char_end": 270, "chars": ".execu" }, { "char_start": 271, "char_end": 278, "chars": "e(sql, " } ] }
github.com/jgayfer/spirit/commit/01c846c534c8d3cf6763f8b7444a0efe2caa3799
db/dbase.py
cwe-089
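The create_event fix uses %s markers in a triple-quoted INSERT and passes the values as a tuple. Assuming the driver is psycopg2 (the %s style suggests a format-paramstyle driver, but the record does not name it), cursor.mogrify() is a convenient way to inspect the statement the driver would actually send; the DSN and the existing events table below are assumptions:

import psycopg2

conn = psycopg2.connect("dbname=app user=app host=localhost")  # hypothetical DSN
cur = conn.cursor()

sql = """
    INSERT INTO events (title, start_time, time_zone, server_id, description)
    VALUES (%s, %s, %s, %s, %s)
"""
params = ("Weekly", "2019-01-01 20:00", "UTC", 1234, "open bracket")

# mogrify() returns the exact bytes psycopg2 would send, with the values safely
# quoted by the driver; useful for checking a parameterized statement.
print(cur.mogrify(sql, params).decode())

cur.execute(sql, params)
conn.commit()
cur.close()
conn.close()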
get_login2
@bot.message_handler(func = lambda message: get_current_state(message.chat.id) == config.States.S_LOGIN.value) def get_login2(message): settings = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\bases\\settings.db") conn = settings.cursor() if bases.createuserbase.check_username(message.text): bot.send_message(message.chat.id, "Invalid handle.") set_state(message.chat.id, config.States.S_START.value) return 0 conn.execute("select * from users where chat_id = '" + str(message.chat.id) + "'") name = conn.fetchone() settings.close() bases.update.cf_update() bases.createuserbase.clean_base(name[1]) bases.createuserbase.clean_base(message.text) bot.send_message(message.chat.id, "Creating base...") bases.createuserbase.init_user(message.text, message.chat.id) bot.send_message(message.chat.id, "Done!") set_state(message.chat.id, config.States.S_START.value)
@bot.message_handler(func = lambda message: get_current_state(message.chat.id) == config.States.S_LOGIN.value) def get_login2(message): settings = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\bases\\settings.db") conn = settings.cursor() if bases.createuserbase.check_username(message.text): bot.send_message(message.chat.id, "Invalid handle.") set_state(message.chat.id, config.States.S_START.value) return 0 conn.execute("select * from users where chat_id = ?", (str(message.chat.id),)) name = conn.fetchone() settings.close() bases.update.cf_update() bases.createuserbase.clean_base(name[1]) bases.createuserbase.clean_base(message.text) bot.send_message(message.chat.id, "Creating base...") bases.createuserbase.init_user(message.text, message.chat.id) bot.send_message(message.chat.id, "Done!") set_state(message.chat.id, config.States.S_START.value)
{ "deleted": [ { "line_no": 9, "char_start": 465, "char_end": 466, "line": "\n" }, { "line_no": 10, "char_start": 466, "char_end": 553, "line": " conn.execute(\"select * from users where chat_id = '\" + str(message.chat.id) + \"'\")\n" } ], "added": [ { "line_no": 9, "char_start": 465, "char_end": 548, "line": " conn.execute(\"select * from users where chat_id = ?\", (str(message.chat.id),))\n" } ] }
{ "deleted": [ { "char_start": 465, "char_end": 466, "chars": "\n" }, { "char_start": 520, "char_end": 521, "chars": "'" }, { "char_start": 522, "char_end": 524, "chars": " +" }, { "char_start": 545, "char_end": 551, "chars": " + \"'\"" } ], "added": [ { "char_start": 519, "char_end": 520, "chars": "?" }, { "char_start": 521, "char_end": 522, "chars": "," }, { "char_start": 523, "char_end": 524, "chars": "(" }, { "char_start": 544, "char_end": 546, "chars": ",)" } ] }
github.com/lissrbay/codeforces_bot/commit/cc7f5143445a0030b1149ac60a65b1b1b9c92a90
bot.py
cwe-089
get_bracket_graph_data
def get_bracket_graph_data(db, tag): # First, we have to find out which scenes this player has brackets in sql = "SELECT DISTINCT scene FROM ranks WHERE player='{}'".format(tag) scenes = db.exec(sql) scenes = [s[0] for s in scenes] bracket_placings_by_scene = {s: get_bracket_placings_in_scene(db, s, tag) for s in scenes} return bracket_placings_by_scene
def get_bracket_graph_data(db, tag): # First, we have to find out which scenes this player has brackets in sql = "SELECT DISTINCT scene FROM ranks WHERE player='{tag}'" args = {'tag': tag} scenes = db.exec(sql, args) scenes = [s[0] for s in scenes] bracket_placings_by_scene = {s: get_bracket_placings_in_scene(db, s, tag) for s in scenes} return bracket_placings_by_scene
{ "deleted": [ { "line_no": 3, "char_start": 111, "char_end": 186, "line": " sql = \"SELECT DISTINCT scene FROM ranks WHERE player='{}'\".format(tag)\n" }, { "line_no": 4, "char_start": 186, "char_end": 212, "line": " scenes = db.exec(sql)\n" } ], "added": [ { "line_no": 3, "char_start": 111, "char_end": 177, "line": " sql = \"SELECT DISTINCT scene FROM ranks WHERE player='{tag}'\"\n" }, { "line_no": 4, "char_start": 177, "char_end": 201, "line": " args = {'tag': tag}\n" }, { "line_no": 5, "char_start": 201, "char_end": 233, "line": " scenes = db.exec(sql, args)\n" } ] }
{ "deleted": [ { "char_start": 173, "char_end": 176, "chars": ".fo" }, { "char_start": 177, "char_end": 178, "chars": "m" }, { "char_start": 179, "char_end": 181, "chars": "t(" }, { "char_start": 184, "char_end": 185, "chars": ")" } ], "added": [ { "char_start": 170, "char_end": 173, "chars": "tag" }, { "char_start": 176, "char_end": 182, "chars": "\n a" }, { "char_start": 183, "char_end": 190, "chars": "gs = {'" }, { "char_start": 191, "char_end": 196, "chars": "ag': " }, { "char_start": 199, "char_end": 200, "chars": "}" }, { "char_start": 225, "char_end": 231, "chars": ", args" } ] }
github.com/DKelle/Smash_stats/commit/4bb83f3f6ce7d6bebbeb512cd015f9e72cf36d63
bracket_utils.py
cwe-089
get
def get(self, email): """ Fetch data for admin with the corresponding email """ return database_utilities.execute_query(f"""select * from admins where email = '{email}'""")
def get(self, email): """ Fetch data for admin with the corresponding email """ return database_utilities.execute_query(f"""select * from admins where email = %s""", (email, ))
{ "deleted": [ { "line_no": 3, "char_start": 92, "char_end": 192, "line": " return database_utilities.execute_query(f\"\"\"select * from admins where email = '{email}'\"\"\")\n" } ], "added": [ { "line_no": 3, "char_start": 92, "char_end": 196, "line": " return database_utilities.execute_query(f\"\"\"select * from admins where email = %s\"\"\", (email, ))\n" } ] }
{ "deleted": [ { "char_start": 179, "char_end": 181, "chars": "'{" }, { "char_start": 186, "char_end": 191, "chars": "}'\"\"\"" } ], "added": [ { "char_start": 179, "char_end": 187, "chars": "%s\"\"\", (" }, { "char_start": 192, "char_end": 195, "chars": ", )" } ] }
github.com/sgosal2/tiger-boards-backend/commit/4670109dd613df2f2fe7e8403ebd149df2b55485
apis/admins.py
cwe-089
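The admins lookup above keeps its f-string prefix but no longer interpolates the email into the SQL; the value travels as a bound parameter instead. A small sqlite3 sketch of why that matters, using a deliberately hostile input (table and rows are made up):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE admins (email TEXT)")
conn.execute("INSERT INTO admins VALUES (?)", ("alice@example.com",))

email = "alice@example.com' OR '1'='1"   # attacker-controlled input
# Unsafe shape: f"select * from admins where email = '{email}'" turns the input into SQL.
# Safe shape: the input is only ever a bound value, compared literally.
rows = conn.execute("SELECT * FROM admins WHERE email = ?", (email,)).fetchall()
print(rows)   # [] -- the malicious string matches nothing instead of everything
conn.close()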
insertUsage
def insertUsage(user, command): c, conn = getConnection() date = now() c.execute("INSERT INTO usage (date,user,command) VALUES ('"+date+"','"+str(user)+"','"+command+"')") conn.commit() conn.close()
def insertUsage(user, command): c, conn = getConnection() date = now() c.execute("INSERT INTO usage (date,user,command) VALUES (?,?,?)",(date,str(user),command)) conn.commit() conn.close()
{ "deleted": [ { "line_no": 4, "char_start": 73, "char_end": 175, "line": "\tc.execute(\"INSERT INTO usage (date,user,command) VALUES ('\"+date+\"','\"+str(user)+\"','\"+command+\"')\")\n" } ], "added": [ { "line_no": 4, "char_start": 73, "char_end": 165, "line": "\tc.execute(\"INSERT INTO usage (date,user,command) VALUES (?,?,?)\",(date,str(user),command))\n" } ] }
{ "deleted": [ { "char_start": 131, "char_end": 132, "chars": "'" }, { "char_start": 133, "char_end": 134, "chars": "+" }, { "char_start": 138, "char_end": 141, "chars": "+\"'" }, { "char_start": 142, "char_end": 145, "chars": "'\"+" }, { "char_start": 154, "char_end": 157, "chars": "+\"'" }, { "char_start": 158, "char_end": 161, "chars": "'\"+" }, { "char_start": 168, "char_end": 171, "chars": "+\"'" }, { "char_start": 172, "char_end": 173, "chars": "\"" } ], "added": [ { "char_start": 131, "char_end": 137, "chars": "?,?,?)" }, { "char_start": 138, "char_end": 140, "chars": ",(" } ] }
github.com/DangerBlack/DungeonsAndDragonsMasterBot/commit/63f980c6dff746f5fcf3005d0646b6c24f81cdc0
database.py
cwe-089
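The insertUsage fix drops the hand-built quoting entirely; with ?, ?, ? the driver does the quoting, so values containing apostrophes or SQL-looking text are stored as plain data. A sketch with made-up rows:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE usage (date TEXT, user TEXT, command TEXT)")

date, user = "2019-05-01 10:00:00", "O'Brien"          # apostrophe in legitimate data
command = "/roll 1d20'); DROP TABLE usage;--"          # hostile-looking input
# No manual quoting or escaping; each value is bound as data.
conn.execute("INSERT INTO usage (date, user, command) VALUES (?, ?, ?)",
             (date, user, command))
conn.commit()
print(conn.execute("SELECT * FROM usage").fetchall())  # both strings come back intact
conn.close()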
verify_email
def verify_email(self, member): query = "SELECT COUNT(email) FROM members WHERE email = '{email}'".format(email = member) self.cursor.execute(query) result = self.cursor.fetchone() if (int(result[0]) > 0): return True else: return False
def verify_email(self, member): self.cursor.execute("SELECT COUNT(email) FROM members WHERE email = ':email'", {'email':member}) result = self.cursor.fetchone() if (int(result[0]) > 0): return True else: return False
{ "deleted": [ { "line_no": 2, "char_start": 36, "char_end": 134, "line": " query = \"SELECT COUNT(email) FROM members WHERE email = '{email}'\".format(email = member)\n" }, { "line_no": 3, "char_start": 134, "char_end": 169, "line": " self.cursor.execute(query)\n" } ], "added": [ { "line_no": 2, "char_start": 36, "char_end": 141, "line": " self.cursor.execute(\"SELECT COUNT(email) FROM members WHERE email = ':email'\", {'email':member})\n" } ] }
{ "deleted": [ { "char_start": 44, "char_end": 46, "chars": "qu" }, { "char_start": 48, "char_end": 52, "chars": "y = " }, { "char_start": 101, "char_end": 102, "chars": "{" }, { "char_start": 107, "char_end": 108, "chars": "}" }, { "char_start": 110, "char_end": 118, "chars": ".format(" }, { "char_start": 123, "char_end": 126, "chars": " = " }, { "char_start": 132, "char_end": 167, "chars": ")\n self.cursor.execute(query" } ], "added": [ { "char_start": 44, "char_end": 60, "chars": "self.cursor.exec" }, { "char_start": 61, "char_end": 62, "chars": "t" }, { "char_start": 63, "char_end": 64, "chars": "(" }, { "char_start": 113, "char_end": 114, "chars": ":" }, { "char_start": 121, "char_end": 125, "chars": ", {'" }, { "char_start": 130, "char_end": 132, "chars": "':" }, { "char_start": 138, "char_end": 139, "chars": "}" } ] }
github.com/kenboo98/291-Mini-Project-I/commit/3080ccb687c79c83954ce703faee8fcceec8c9eb
book_rides/book_rides.py
cwe-089
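The verify_email fix uses sqlite3's named parameter style with a dict. When writing that style against the standard sqlite3 module, the marker goes into the SQL without surrounding quotes (a quoted ':email' would be read as an ordinary string literal rather than a placeholder); a minimal sketch with a made-up members table:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE members (email TEXT)")
conn.execute("INSERT INTO members VALUES (?)", ("bob@example.com",))

member = "bob@example.com"
# Named style: the bare :email marker is bound from the dict key of the same name.
cur = conn.execute(
    "SELECT COUNT(email) FROM members WHERE email = :email",
    {"email": member},
)
print(cur.fetchone()[0])   # 1
conn.close()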
get_previous_yields
def get_previous_yields(self, inverter_serial): query = ''' SELECT TimeStamp, EToday, ETotal FROM Inverters WHERE Serial = '%s' ''' % (inverter_serial) self.c.execute(query) data = self.c.fetchone() return data[0], data[1], data[2]
def get_previous_yields(self, inverter_serial): query = ''' SELECT TimeStamp, EToday, ETotal FROM Inverters WHERE Serial=? ''' self.c.execute(query, (inverter_serial,)) data = self.c.fetchone() return data[0], data[1], data[2]
{ "deleted": [ { "line_no": 5, "char_start": 142, "char_end": 173, "line": " WHERE Serial = '%s'\n" }, { "line_no": 6, "char_start": 173, "char_end": 205, "line": " ''' % (inverter_serial)\n" }, { "line_no": 7, "char_start": 205, "char_end": 235, "line": " self.c.execute(query)\n" } ], "added": [ { "line_no": 5, "char_start": 142, "char_end": 168, "line": " WHERE Serial=?\n" }, { "line_no": 6, "char_start": 168, "char_end": 180, "line": " '''\n" }, { "line_no": 7, "char_start": 180, "char_end": 230, "line": " self.c.execute(query, (inverter_serial,))\n" } ] }
{ "deleted": [ { "char_start": 165, "char_end": 166, "chars": " " }, { "char_start": 167, "char_end": 172, "chars": " '%s'" }, { "char_start": 184, "char_end": 204, "chars": " % (inverter_serial)" } ], "added": [ { "char_start": 166, "char_end": 167, "chars": "?" }, { "char_start": 208, "char_end": 228, "chars": ", (inverter_serial,)" } ] }
github.com/philipptrenz/s0-bridge/commit/269b48caa05377b7c58c3e6d1622a4429cb5ba65
util/database.py
cwe-089
referrer_count
@app.route('/referrer_count') def referrer_count(): account_id = request.args.get('account_id') if not isObject(account_id): ws.send('{"id":1, "method":"call", "params":[0,"lookup_account_names",[["' + account_id + '"], 0]]}') result_l = ws.recv() j_l = json.loads(result_l) account_id = j_l["result"][0]["id"] con = psycopg2.connect(**config.POSTGRES) cur = con.cursor() query = "select count(*) from referrers where referrer='"+account_id+"'" cur.execute(query) results = cur.fetchone() return jsonify(results)
@app.route('/referrer_count') def referrer_count(): account_id = request.args.get('account_id') if not isObject(account_id): ws.send('{"id":1, "method":"call", "params":[0,"lookup_account_names",[["' + account_id + '"], 0]]}') result_l = ws.recv() j_l = json.loads(result_l) account_id = j_l["result"][0]["id"] con = psycopg2.connect(**config.POSTGRES) cur = con.cursor() query = "select count(*) from referrers where referrer=%s" cur.execute(query, (account_id,)) results = cur.fetchone() return jsonify(results)
{ "deleted": [ { "line_no": 15, "char_start": 424, "char_end": 501, "line": " query = \"select count(*) from referrers where referrer='\"+account_id+\"'\"\n" }, { "line_no": 16, "char_start": 501, "char_end": 524, "line": " cur.execute(query)\n" } ], "added": [ { "line_no": 15, "char_start": 424, "char_end": 487, "line": " query = \"select count(*) from referrers where referrer=%s\"\n" }, { "line_no": 16, "char_start": 487, "char_end": 525, "line": " cur.execute(query, (account_id,))\n" } ] }
{ "deleted": [ { "char_start": 483, "char_end": 499, "chars": "'\"+account_id+\"'" } ], "added": [ { "char_start": 483, "char_end": 485, "chars": "%s" }, { "char_start": 508, "char_end": 523, "chars": ", (account_id,)" } ] }
github.com/VinChain/vinchain-python-api-backend/commit/b78088a551fbb712121269c6eb7f43ede120ff60
api.py
cwe-089
analyze_scene
def analyze_scene(self, scene): base_urls = scene.get_base_urls() users = scene.get_users() name = scene.get_name() LOG.info('found the following users for scene {}: {}'.format(name, users)) # This scene might have one user who always posts the brackets on their challonge account for user in users: # Have we analyzed this user before? sql = "SELECT * FROM user_analyzed WHERE user='{}';".format(user) results = self.db.exec(sql) # Did we have any matches in the database? if len(results) > 0: # We have analyzed this user before. Just grab one page of brackets to see if there have been any new tournaments # eg, just look at /users/christmasmike?page=1 instead of all the pages that exist most_recent_page = bracket_utils.get_brackets_from_user(user, pages=1) for bracket in most_recent_page: LOG.info('here are the brackets from the most recent page of user {}: {}'.format(user, most_recent_page)) # This user has already been analyzed, there's a good chance this bracket has been analyzed also sql = "SELECT * FROM user_analyzed WHERE url='{}' AND user='{}';".format(bracket, user) results = self.db.exec(sql) if len(results) == 0: # This is a new bracket that must have been published in the last hour or so LOG.info('found this url from a user: {} {}'.format(bracket, user)) display_name = bracket_utils.get_display_base(bracket) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(bracket, name, display_name) # mark this bracket as analyzed sql = "INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');".format(bracket, user, name) self.db.exec(sql) # Tweet that we found a new bracket msg = "Found new {} bracket: {}".format(name, bracket) tweet(msg) else: LOG.info('url {} is not new for user {}'.format(bracket, user)) else: # This is a new user, analyze all brackets user_urls = bracket_utils.get_brackets_from_user(user) for url in user_urls: LOG.info('found this url from a user: {} {}'.format(url, user)) display_name = bracket_utils.get_display_base(url) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(url, name, display_name) # mark this bracket as analyzed sql = "INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');".format(url, user, name) self.db.exec(sql) LOG.info('done with user {}'.format(user)) # This scene might always call their brackets the same thing, eg weekly1, weekly2, weekly3 etc for base_url in base_urls: # attempt to load this data from the database LOG.info('About to start this analysis thread for scene {}'.format(scene.get_name())) sql = "SELECT first,last FROM valids WHERE base_url = '" + str(base_url) + "';" result = self.db.exec(sql) has_results = len(result) > 0 # Did we find a match in the database? 
if has_results: LOG.info("validURLs found values in the database" + str(result)) first = result[0][0] last = result[0][1] # Check for a new valid URL new_last = bracket_utils._get_last_valid_url(base_url, last-1) if not new_last == last: if new_last - last > 5: with open("DEBUGOUTPUT.txt", 'a') as f: f.write("[validURLs.py:55]: found a SHIT TON of new tournaments for bracket: {}".format(base_url)) else: bracket = base_url.replace('###', str(new_last)) LOG.info('Found new bracket: {}'.format(bracket)) msg = "Found new bracket: {}".format(bracket) tweet(msg) # If there's been a new last, update the database sql = "UPDATE valids SET last=" + str(new_last) + " where base_url = '"+str(base_url)+"';" self.db.exec(sql) # Analyze each of these new brackets for i in range(last+1, new_last+1): # Since this URL is new, we have to process the data bracket = base_url.replace('###', str(i)) # Create the display name for this bracket # Eg challonge.com/NP9ATX54 -> NP9 54 display_name = bracket_utils.get_display_base(bracket, counter=i) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(bracket, name, display_name, new_bracket=True) else: # We need to create first and last from scratch first = bracket_utils._get_first_valid_url(base_url) last = bracket_utils._get_last_valid_url(base_url, first) # This is new data, we need to put it into the db sql = "INSERT INTO valids (base_url, first, last, scene) VALUES (" sql += "'"+str(base_url)+"', "+str(first)+ ", "+str(last)+", '"+str(name)+"');" self.db.exec(sql) for i in range(first, last+1): bracket = base_url.replace('###', str(i)) # Create the display name for this bracket # Eg challonge.com/NP9ATX54 -> NP9 54 display_name = bracket_utils.get_display_base(bracket, counter=i) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(bracket, name, display_name) # Calculate ranks after each tournament so we can see how players are progressing if not analyzed_scenes and should_tweet: tweet('About to start ranking for scene {}'.format(name)) self.data_processor.check_and_update_ranks(name)
def analyze_scene(self, scene): base_urls = scene.get_base_urls() users = scene.get_users() name = scene.get_name() LOG.info('found the following users for scene {}: {}'.format(name, users)) # This scene might have one user who always posts the brackets on their challonge account for user in users: # Have we analyzed this user before? sql = "SELECT * FROM user_analyzed WHERE user='{user}';" args = {'user': user} results = self.db.exec(sql, args) # Did we have any matches in the database? if len(results) > 0: # We have analyzed this user before. Just grab one page of brackets to see if there have been any new tournaments # eg, just look at /users/christmasmike?page=1 instead of all the pages that exist most_recent_page = bracket_utils.get_brackets_from_user(user, pages=1) for bracket in most_recent_page: LOG.info('here are the brackets from the most recent page of user {}: {}'.format(user, most_recent_page)) # This user has already been analyzed, there's a good chance this bracket has been analyzed also sql = "SELECT * FROM user_analyzed WHERE url='{bracket}' AND user='{user}';" args = {'bracket': bracket, 'user': user} results = self.db.exec(sql, args) if len(results) == 0: # This is a new bracket that must have been published in the last hour or so LOG.info('found this url from a user: {} {}'.format(bracket, user)) display_name = bracket_utils.get_display_base(bracket) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(bracket, name, display_name) # mark this bracket as analyzed sql = "INSERT INTO user_analyzed (url, user, scene) VALUES ('{bracket}', '{user}', '{name}');" args = {'bracket': bracket, 'user':user, 'name':name} self.db.exec(sql, args) # Tweet that we found a new bracket msg = "Found new {} bracket: {}".format(name, bracket) tweet(msg) else: LOG.info('url {} is not new for user {}'.format(bracket, user)) else: # This is a new user, analyze all brackets user_urls = bracket_utils.get_brackets_from_user(user) for url in user_urls: LOG.info('found this url from a user: {} {}'.format(url, user)) display_name = bracket_utils.get_display_base(url) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(url, name, display_name) # mark this bracket as analyzed sql = "INSERT INTO user_analyzed (url, user, scene) VALUES ('{url}', '{user}', '{name}');" args = {'url': url, 'user':user, 'name':name} self.db.exec(sql, args) LOG.info('done with user {}'.format(user)) # This scene might always call their brackets the same thing, eg weekly1, weekly2, weekly3 etc for base_url in base_urls: # attempt to load this data from the database LOG.info('About to start this analysis thread for scene {}'.format(scene.get_name())) sql = "SELECT first,last FROM valids WHERE base_url = '{base_url}';" args = {'base_url': base_url} result = self.db.exec(sql, args) has_results = len(result) > 0 # Did we find a match in the database? 
if has_results: LOG.info("validURLs found values in the database" + str(result)) first = result[0][0] last = result[0][1] # Check for a new valid URL new_last = bracket_utils._get_last_valid_url(base_url, last-1) if not new_last == last: if new_last - last > 5: with open("DEBUGOUTPUT.txt", 'a') as f: f.write("[validURLs.py:55]: found a SHIT TON of new tournaments for bracket: {}".format(base_url)) else: bracket = base_url.replace('###', str(new_last)) LOG.info('Found new bracket: {}'.format(bracket)) msg = "Found new bracket: {}".format(bracket) tweet(msg) # If there's been a new last, update the database sql = "UPDATE valids SET last={new_last} where base_url='{base_url}';" args = {'new_last': new_last, 'base_url': base_url} self.db.exec(sql, args) # Analyze each of these new brackets for i in range(last+1, new_last+1): # Since this URL is new, we have to process the data bracket = base_url.replace('###', str(i)) # Create the display name for this bracket # Eg challonge.com/NP9ATX54 -> NP9 54 display_name = bracket_utils.get_display_base(bracket, counter=i) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(bracket, name, display_name, new_bracket=True) else: # We need to create first and last from scratch first = bracket_utils._get_first_valid_url(base_url) last = bracket_utils._get_last_valid_url(base_url, first) # This is new data, we need to put it into the db sql = "INSERT INTO valids (base_url, first, last, scene) VALUES ('{base_url}', '{first}', '{last}', '{name}');" args = {'base_url': base_url, 'first': first, 'last': last, 'name': name} self.db.exec(sql, args) for i in range(first, last+1): bracket = base_url.replace('###', str(i)) # Create the display name for this bracket # Eg challonge.com/NP9ATX54 -> NP9 54 display_name = bracket_utils.get_display_base(bracket, counter=i) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue self.data_processor.process(bracket, name, display_name) # Calculate ranks after each tournament so we can see how players are progressing if not analyzed_scenes and should_tweet: tweet('About to start ranking for scene {}'.format(name)) self.data_processor.check_and_update_ranks(name)
{ "deleted": [ { "line_no": 10, "char_start": 402, "char_end": 480, "line": " sql = \"SELECT * FROM user_analyzed WHERE user='{}';\".format(user)\n" }, { "line_no": 11, "char_start": 480, "char_end": 520, "line": " results = self.db.exec(sql)\n" }, { "line_no": 21, "char_start": 1217, "char_end": 1325, "line": " sql = \"SELECT * FROM user_analyzed WHERE url='{}' AND user='{}';\".format(bracket, user)\n" }, { "line_no": 22, "char_start": 1325, "char_end": 1373, "line": " results = self.db.exec(sql)\n" }, { "line_no": 36, "char_start": 2156, "char_end": 2288, "line": " sql = \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');\".format(bracket, user, name)\n" }, { "line_no": 37, "char_start": 2288, "char_end": 2330, "line": " self.db.exec(sql)\n" }, { "line_no": 58, "char_start": 3400, "char_end": 3524, "line": " sql = \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');\".format(url, user, name)\n" }, { "line_no": 59, "char_start": 3524, "char_end": 3562, "line": " self.db.exec(sql)\n" }, { "line_no": 68, "char_start": 3918, "char_end": 4010, "line": " sql = \"SELECT first,last FROM valids WHERE base_url = '\" + str(base_url) + \"';\"\n" }, { "line_no": 69, "char_start": 4010, "char_end": 4049, "line": " result = self.db.exec(sql)\n" }, { "line_no": 93, "char_start": 5077, "char_end": 5188, "line": " sql = \"UPDATE valids SET last=\" + str(new_last) + \" where base_url = '\"+str(base_url)+\"';\"\n" }, { "line_no": 94, "char_start": 5188, "char_end": 5226, "line": " self.db.exec(sql)\n" }, { "line_no": 117, "char_start": 6425, "char_end": 6508, "line": " sql = \"INSERT INTO valids (base_url, first, last, scene) VALUES (\"\n" }, { "line_no": 118, "char_start": 6508, "char_end": 6604, "line": " sql += \"'\"+str(base_url)+\"', \"+str(first)+ \", \"+str(last)+\", '\"+str(name)+\"');\"\n" }, { "line_no": 119, "char_start": 6604, "char_end": 6638, "line": " self.db.exec(sql)\n" } ], "added": [ { "line_no": 10, "char_start": 402, "char_end": 471, "line": " sql = \"SELECT * FROM user_analyzed WHERE user='{user}';\"\n" }, { "line_no": 11, "char_start": 471, "char_end": 505, "line": " args = {'user': user}\n" }, { "line_no": 12, "char_start": 505, "char_end": 551, "line": " results = self.db.exec(sql, args)\n" }, { "line_no": 22, "char_start": 1248, "char_end": 1345, "line": " sql = \"SELECT * FROM user_analyzed WHERE url='{bracket}' AND user='{user}';\"\n" }, { "line_no": 23, "char_start": 1345, "char_end": 1407, "line": " args = {'bracket': bracket, 'user': user}\n" }, { "line_no": 24, "char_start": 1407, "char_end": 1461, "line": " results = self.db.exec(sql, args)\n" }, { "line_no": 38, "char_start": 2244, "char_end": 2363, "line": " sql = \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{bracket}', '{user}', '{name}');\"\n" }, { "line_no": 39, "char_start": 2363, "char_end": 2441, "line": " args = {'bracket': bracket, 'user':user, 'name':name}\n" }, { "line_no": 40, "char_start": 2441, "char_end": 2489, "line": " self.db.exec(sql, args)\n" }, { "line_no": 61, "char_start": 3559, "char_end": 3670, "line": " sql = \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{url}', '{user}', '{name}');\"\n" }, { "line_no": 62, "char_start": 3670, "char_end": 3736, "line": " args = {'url': url, 'user':user, 'name':name}\n" }, { "line_no": 63, "char_start": 3736, "char_end": 3780, "line": " self.db.exec(sql, args)\n" }, { "line_no": 72, "char_start": 4136, "char_end": 4217, "line": " sql = \"SELECT first,last FROM valids WHERE base_url = '{base_url}';\"\n" 
}, { "line_no": 73, "char_start": 4217, "char_end": 4259, "line": " args = {'base_url': base_url}\n" }, { "line_no": 74, "char_start": 4259, "char_end": 4304, "line": " result = self.db.exec(sql, args)\n" }, { "line_no": 98, "char_start": 5332, "char_end": 5423, "line": " sql = \"UPDATE valids SET last={new_last} where base_url='{base_url}';\"\n" }, { "line_no": 99, "char_start": 5423, "char_end": 5495, "line": " args = {'new_last': new_last, 'base_url': base_url}\n" }, { "line_no": 100, "char_start": 5495, "char_end": 5539, "line": " self.db.exec(sql, args)\n" }, { "line_no": 123, "char_start": 6738, "char_end": 6866, "line": " sql = \"INSERT INTO valids (base_url, first, last, scene) VALUES ('{base_url}', '{first}', '{last}', '{name}');\"\n" }, { "line_no": 124, "char_start": 6866, "char_end": 6956, "line": " args = {'base_url': base_url, 'first': first, 'last': last, 'name': name}\n" }, { "line_no": 125, "char_start": 6956, "char_end": 6996, "line": " self.db.exec(sql, args)\n" } ] }
{ "deleted": [ { "char_start": 466, "char_end": 469, "chars": ".fo" }, { "char_start": 470, "char_end": 474, "chars": "mat(" }, { "char_start": 478, "char_end": 479, "chars": ")" }, { "char_start": 1302, "char_end": 1305, "chars": ".fo" }, { "char_start": 1306, "char_end": 1307, "chars": "m" }, { "char_start": 1309, "char_end": 1310, "chars": "(" }, { "char_start": 1323, "char_end": 1324, "chars": ")" }, { "char_start": 2259, "char_end": 2262, "chars": ".fo" }, { "char_start": 2263, "char_end": 2264, "chars": "m" }, { "char_start": 2266, "char_end": 2267, "chars": "(" }, { "char_start": 2286, "char_end": 2287, "chars": ")" }, { "char_start": 3499, "char_end": 3502, "chars": ".fo" }, { "char_start": 3503, "char_end": 3507, "chars": "mat(" }, { "char_start": 3522, "char_end": 3523, "chars": ")" }, { "char_start": 3987, "char_end": 3988, "chars": "+" }, { "char_start": 3990, "char_end": 3991, "chars": "t" }, { "char_start": 3992, "char_end": 3993, "chars": "(" }, { "char_start": 4001, "char_end": 4009, "chars": ") + \"';\"" }, { "char_start": 5127, "char_end": 5135, "chars": "\" + str(" }, { "char_start": 5143, "char_end": 5148, "chars": ") + \"" }, { "char_start": 5167, "char_end": 5169, "chars": "\"+" }, { "char_start": 5172, "char_end": 5173, "chars": "(" }, { "char_start": 5181, "char_end": 5187, "chars": ")+\"';\"" }, { "char_start": 6525, "char_end": 6527, "chars": "ql" }, { "char_start": 6528, "char_end": 6529, "chars": "+" }, { "char_start": 6531, "char_end": 6532, "chars": "\"" }, { "char_start": 6533, "char_end": 6535, "chars": "\"+" }, { "char_start": 6536, "char_end": 6537, "chars": "t" }, { "char_start": 6538, "char_end": 6539, "chars": "(" }, { "char_start": 6547, "char_end": 6551, "chars": ")+\"'" }, { "char_start": 6553, "char_end": 6555, "chars": "\"+" }, { "char_start": 6557, "char_end": 6559, "chars": "r(" }, { "char_start": 6564, "char_end": 6568, "chars": ")+ \"" }, { "char_start": 6570, "char_end": 6572, "chars": "\"+" }, { "char_start": 6574, "char_end": 6576, "chars": "r(" }, { "char_start": 6580, "char_end": 6583, "chars": ")+\"" }, { "char_start": 6586, "char_end": 6592, "chars": "\"+str(" }, { "char_start": 6596, "char_end": 6599, "chars": ")+\"" }, { "char_start": 6600, "char_end": 6603, "chars": ");\"" } ], "added": [ { "char_start": 462, "char_end": 466, "chars": "user" }, { "char_start": 470, "char_end": 483, "chars": "\n " }, { "char_start": 484, "char_end": 492, "chars": "rgs = {'" }, { "char_start": 496, "char_end": 504, "chars": "': user}" }, { "char_start": 543, "char_end": 549, "chars": ", args" }, { "char_start": 1315, "char_end": 1322, "chars": "bracket" }, { "char_start": 1336, "char_end": 1340, "chars": "user" }, { "char_start": 1344, "char_end": 1366, "chars": "\n a" }, { "char_start": 1367, "char_end": 1376, "chars": "gs = {'br" }, { "char_start": 1377, "char_end": 1380, "chars": "cke" }, { "char_start": 1381, "char_end": 1384, "chars": "': " }, { "char_start": 1393, "char_end": 1394, "chars": "'" }, { "char_start": 1398, "char_end": 1406, "chars": "': user}" }, { "char_start": 1453, "char_end": 1459, "chars": ", args" }, { "char_start": 2330, "char_end": 2337, "chars": "bracket" }, { "char_start": 2343, "char_end": 2347, "chars": "user" }, { "char_start": 2353, "char_end": 2357, "chars": "name" }, { "char_start": 2362, "char_end": 2388, "chars": "\n a" }, { "char_start": 2389, "char_end": 2398, "chars": "gs = {'br" }, { "char_start": 2399, "char_end": 2402, "chars": "cke" }, { "char_start": 2403, "char_end": 2406, "chars": "': " }, { "char_start": 
2415, "char_end": 2422, "chars": "'user':" }, { "char_start": 2428, "char_end": 2429, "chars": "'" }, { "char_start": 2433, "char_end": 2440, "chars": "':name}" }, { "char_start": 2481, "char_end": 2487, "chars": ", args" }, { "char_start": 3641, "char_end": 3644, "chars": "url" }, { "char_start": 3650, "char_end": 3654, "chars": "user" }, { "char_start": 3660, "char_end": 3664, "chars": "name" }, { "char_start": 3669, "char_end": 3690, "chars": "\n " }, { "char_start": 3691, "char_end": 3705, "chars": "rgs = {'url': " }, { "char_start": 3710, "char_end": 3717, "chars": "'user':" }, { "char_start": 3723, "char_end": 3724, "chars": "'" }, { "char_start": 3728, "char_end": 3735, "chars": "':name}" }, { "char_start": 3772, "char_end": 3778, "chars": ", args" }, { "char_start": 4203, "char_end": 4215, "chars": "{base_url}';" }, { "char_start": 4216, "char_end": 4219, "chars": "\n " }, { "char_start": 4220, "char_end": 4225, "chars": " " }, { "char_start": 4226, "char_end": 4230, "chars": " a" }, { "char_start": 4231, "char_end": 4238, "chars": "gs = {'" }, { "char_start": 4247, "char_end": 4258, "chars": ": base_url}" }, { "char_start": 4296, "char_end": 4302, "chars": ", args" }, { "char_start": 5382, "char_end": 5383, "chars": "{" }, { "char_start": 5391, "char_end": 5392, "chars": "}" }, { "char_start": 5407, "char_end": 5447, "chars": "='{base_url}';\"\n args" }, { "char_start": 5450, "char_end": 5451, "chars": "{" }, { "char_start": 5452, "char_end": 5469, "chars": "new_last': new_la" }, { "char_start": 5471, "char_end": 5474, "chars": ", '" }, { "char_start": 5483, "char_end": 5494, "chars": ": base_url}" }, { "char_start": 5531, "char_end": 5537, "chars": ", args" }, { "char_start": 6819, "char_end": 6864, "chars": "'{base_url}', '{first}', '{last}', '{name}');" }, { "char_start": 6882, "char_end": 6885, "chars": "arg" }, { "char_start": 6889, "char_end": 6890, "chars": "{" }, { "char_start": 6891, "char_end": 6893, "chars": "ba" }, { "char_start": 6894, "char_end": 6897, "chars": "e_u" }, { "char_start": 6898, "char_end": 6902, "chars": "l': " }, { "char_start": 6912, "char_end": 6916, "chars": "'fir" }, { "char_start": 6918, "char_end": 6921, "chars": "': " }, { "char_start": 6928, "char_end": 6931, "chars": "'la" }, { "char_start": 6933, "char_end": 6936, "chars": "': " }, { "char_start": 6948, "char_end": 6955, "chars": ": name}" }, { "char_start": 6988, "char_end": 6994, "chars": ", args" } ] }
github.com/DKelle/Smash_stats/commit/4bb83f3f6ce7d6bebbeb512cd015f9e72cf36d63
validURLs.py
cwe-089
get_article
def get_article(index): with conn.cursor(cursor_factory=DictCursor) as cur: query = "SELECT * FROM articles WHERE index="+str(index) cur.execute(query) article = cur.fetchone() return article
def get_article(index): with conn.cursor(cursor_factory=DictCursor) as cur: query = "SELECT * FROM articles WHERE index=%s" cur.execute(query, (index, )) article = cur.fetchone() return article
{ "deleted": [ { "line_no": 3, "char_start": 80, "char_end": 145, "line": " query = \"SELECT * FROM articles WHERE index=\"+str(index)\n" }, { "line_no": 4, "char_start": 145, "char_end": 172, "line": " cur.execute(query)\n" } ], "added": [ { "line_no": 3, "char_start": 80, "char_end": 136, "line": " query = \"SELECT * FROM articles WHERE index=%s\"\n" }, { "line_no": 4, "char_start": 136, "char_end": 174, "line": " cur.execute(query, (index, ))\n" } ] }
{ "deleted": [ { "char_start": 132, "char_end": 134, "chars": "\"+" }, { "char_start": 135, "char_end": 144, "chars": "tr(index)" } ], "added": [ { "char_start": 132, "char_end": 133, "chars": "%" }, { "char_start": 134, "char_end": 135, "chars": "\"" }, { "char_start": 161, "char_end": 172, "chars": ", (index, )" } ] }
github.com/sepehr125/arxiv-doc2vec-recommender/commit/f23a4c32e6192b145017f64734b0a9a384c9123a
app.py
cwe-089
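The get_article pair above is the most common rewrite pattern in these records: a string-concatenated WHERE clause becomes a placeholder plus a parameter tuple. The following is a minimal editorial sketch, not a dataset record; it assumes an invented articles table and uses the standard-library sqlite3 module (qmark ? placeholders) rather than the psycopg2 %s style of the original fix, so that it runs on its own.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE articles (idx INTEGER, title TEXT)")
conn.execute("INSERT INTO articles VALUES (?, ?)", (1, "hello"))

def get_article(index):
    # placeholder plus parameter tuple instead of string concatenation;
    # note the trailing comma: (index,) is a one-element tuple, not a bare value
    cur = conn.execute("SELECT * FROM articles WHERE idx = ?", (index,))
    return cur.fetchone()

print(get_article(1))  # -> (1, 'hello')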
create_cf_base
def create_cf_base(): url = 'http://codeforces.com/problemset/' r = requests.get(url) max_page = 0 soup = BeautifulSoup(r.text, "lxml") base = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\cf.db") conn = base.cursor() conn.execute("create table problems (problem INTEGER, diff CHAR)") for i in available_tags: conn.execute("create table " + i + " (problems INTEGER, diff CHAR)") for link in soup.find_all(attrs={"class" : "page-index"}): s = link.find('a') s2 = s.get("href").split('/') max_page = max(max_page, int(s2[3])) a = 0 b = 0 f = False for i in range(1, max_page + 1): r = requests.get('http://codeforces.com/problemset/' + '/page/' + str(i)) soup = BeautifulSoup(r.text, "lxml") old = '' for link in soup.find_all('a'): s = link.get('href') if s != None and s.find('/problemset') != -1: s = s.split('/') if len(s) == 5 and old != s[3] + s[4]: a = s[3] b = s[4] old = s[3] + s[4] if not f: f = True last_update = old conn.execute("insert into problems values (?, ?)", (a, b)) if len(s) == 4 and s[3] in available_tags: conn.execute("insert into " + s[3] + " values (?, ?)", (a, b)) base.commit() base.close() settings = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\settings.db") conn = settings.cursor() conn.execute("create table users (chat_id INTEGER, username STRING, last_update STRING, last_problem STRING, state INTEGER)") conn.execute("create table last_update_problemset (problem STRING)") conn.execute("insert into last_update_problemset values (?)", (last_update, )) settings.commit() settings.close()
def create_cf_base(): url = 'http://codeforces.com/problemset/' r = requests.get(url) max_page = 0 soup = BeautifulSoup(r.text, "lxml") base = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\cf.db") conn = base.cursor() conn.execute("create table problems (problem INTEGER, diff CHAR)") for i in available_tags: conn.execute("create table ? (problems INTEGER, diff CHAR)", (i,)) for link in soup.find_all(attrs={"class" : "page-index"}): s = link.find('a') s2 = s.get("href").split('/') max_page = max(max_page, int(s2[3])) a = 0 b = 0 f = False for i in range(1, max_page + 1): r = requests.get('http://codeforces.com/problemset/' + '/page/' + str(i)) soup = BeautifulSoup(r.text, "lxml") old = '' for link in soup.find_all('a'): s = link.get('href') if s != None and s.find('/problemset') != -1: s = s.split('/') if len(s) == 5 and old != s[3] + s[4]: a = s[3] b = s[4] old = s[3] + s[4] if not f: f = True last_update = old conn.execute("insert into problems values (?, ?)", (a, b)) if len(s) == 4 and s[3] in available_tags: conn.execute("insert into ? values (?, ?)", (s[3], a, b)) base.commit() base.close() settings = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + "\\settings.db") conn = settings.cursor() conn.execute("create table users (chat_id INTEGER, username STRING, last_update STRING, last_problem STRING, state INTEGER)") conn.execute("create table last_update_problemset (problem STRING)") conn.execute("insert into last_update_problemset values (?)", (last_update, )) settings.commit() settings.close()
{ "deleted": [ { "line_no": 10, "char_start": 360, "char_end": 437, "line": " conn.execute(\"create table \" + i + \" (problems INTEGER, diff CHAR)\")\n" }, { "line_no": 37, "char_start": 1385, "char_end": 1468, "line": " conn.execute(\"insert into \" + s[3] + \" values (?, ?)\", (a, b))\n" } ], "added": [ { "line_no": 10, "char_start": 360, "char_end": 435, "line": " conn.execute(\"create table ? (problems INTEGER, diff CHAR)\", (i,))\n" }, { "line_no": 37, "char_start": 1383, "char_end": 1461, "line": " conn.execute(\"insert into ? values (?, ?)\", (s[3], a, b))\n" } ] }
{ "deleted": [ { "char_start": 395, "char_end": 404, "chars": "\" + i + \"" }, { "char_start": 1431, "char_end": 1443, "chars": "\" + s[3] + \"" } ], "added": [ { "char_start": 395, "char_end": 396, "chars": "?" }, { "char_start": 427, "char_end": 433, "chars": ", (i,)" }, { "char_start": 1429, "char_end": 1430, "chars": "?" }, { "char_start": 1448, "char_end": 1454, "chars": "s[3], " } ] }
github.com/lissrbay/codeforces_bot/commit/cc7f5143445a0030b1149ac60a65b1b1b9c92a90
bases/createcfbase.py
cwe-089
closeGame
def closeGame(ID): db.execute("UPDATE games set Running = 'No' WHERE ID = %i" % ID) database.commit()
def closeGame(ID): db.execute("UPDATE games set Running = 'No' WHERE ID = ?", ID) database.commit()
{ "deleted": [ { "line_no": 2, "char_start": 19, "char_end": 85, "line": "\tdb.execute(\"UPDATE games set Running = 'No' WHERE ID = %i\" % ID)\n" } ], "added": [ { "line_no": 2, "char_start": 19, "char_end": 83, "line": "\tdb.execute(\"UPDATE games set Running = 'No' WHERE ID = ?\", ID)\n" } ] }
{ "deleted": [ { "char_start": 75, "char_end": 77, "chars": "%i" }, { "char_start": 78, "char_end": 80, "chars": " %" } ], "added": [ { "char_start": 75, "char_end": 76, "chars": "?" }, { "char_start": 77, "char_end": 78, "chars": "," } ] }
github.com/iScrE4m/XLeague/commit/59cab6e5fd8bd5e47f2418a7c71cb1d4e3cad0d2
plugins/database.py
cwe-089
cancelFollow
def cancelFollow(self,userid,friendid): sqlText="delete from friends where userid=%d and friendid=%d;"%(userid,friendid) result=sql.deleteDB(self.conn,sqlText) return result;
def cancelFollow(self,userid,friendid): sqlText="delete from friends where userid=%d and friendid=%s;" params=[userid,friendid] result=sql.deleteDB(self.conn,sqlText,params) return result;
{ "deleted": [ { "line_no": 2, "char_start": 44, "char_end": 133, "line": " sqlText=\"delete from friends where userid=%d and friendid=%d;\"%(userid,friendid)\n" }, { "line_no": 3, "char_start": 133, "char_end": 180, "line": " result=sql.deleteDB(self.conn,sqlText)\n" } ], "added": [ { "line_no": 2, "char_start": 44, "char_end": 115, "line": " sqlText=\"delete from friends where userid=%d and friendid=%s;\"\n" }, { "line_no": 3, "char_start": 115, "char_end": 148, "line": " params=[userid,friendid]\n" }, { "line_no": 4, "char_start": 148, "char_end": 202, "line": " result=sql.deleteDB(self.conn,sqlText,params)\n" } ] }
{ "deleted": [ { "char_start": 111, "char_end": 112, "chars": "d" }, { "char_start": 114, "char_end": 116, "chars": "%(" }, { "char_start": 131, "char_end": 132, "chars": ")" } ], "added": [ { "char_start": 111, "char_end": 112, "chars": "s" }, { "char_start": 114, "char_end": 131, "chars": "\n params=[" }, { "char_start": 146, "char_end": 147, "chars": "]" }, { "char_start": 193, "char_end": 200, "chars": ",params" } ] }
github.com/ShaominLi/Twitter_project/commit/5329d91f9e569c95184053c8e7ef596949c33ce9
modules/users.py
cwe-089
change_pass
def change_pass(self, new_pass, logged_user): update_sql = """ UPDATE Clients SET password = '{}' WHERE client_id = '{}' """.format(new_pass, logged_user.get_client_id()) cursor = self.__conn.cursor() cursor.execute(update_sql) self.__conn.commit()
def change_pass(self, new_pass, logged_user): update_sql = """ UPDATE Clients SET password = ? WHERE client_id = ? """ cursor = self.__conn.cursor() cursor.execute(update_sql, (new_pass, logged_user.get_client_id())) self.__conn.commit()
{ "deleted": [ { "line_no": 4, "char_start": 102, "char_end": 134, "line": " SET password = '{}'\n" }, { "line_no": 5, "char_start": 134, "char_end": 169, "line": " WHERE client_id = '{}'\n" }, { "line_no": 6, "char_start": 169, "char_end": 227, "line": " \"\"\".format(new_pass, logged_user.get_client_id())\n" }, { "line_no": 10, "char_start": 267, "char_end": 302, "line": " cursor.execute(update_sql)\n" } ], "added": [ { "line_no": 4, "char_start": 102, "char_end": 131, "line": " SET password = ?\n" }, { "line_no": 5, "char_start": 131, "char_end": 163, "line": " WHERE client_id = ?\n" }, { "line_no": 6, "char_start": 163, "char_end": 175, "line": " \"\"\"\n" }, { "line_no": 10, "char_start": 215, "char_end": 291, "line": " cursor.execute(update_sql, (new_pass, logged_user.get_client_id()))\n" } ] }
{ "deleted": [ { "char_start": 129, "char_end": 133, "chars": "'{}'" }, { "char_start": 164, "char_end": 168, "chars": "'{}'" }, { "char_start": 180, "char_end": 226, "chars": ".format(new_pass, logged_user.get_client_id())" } ], "added": [ { "char_start": 129, "char_end": 130, "chars": "?" }, { "char_start": 161, "char_end": 162, "chars": "?" }, { "char_start": 248, "char_end": 289, "chars": ", (new_pass, logged_user.get_client_id())" } ] }
github.com/AnetaStoycheva/Programming101_HackBulgaria/commit/c0d6f4b8fe83a375832845a45952b5153e4c34f3
Week_9/sql_manager.py
cwe-089
getCommentsByPostid
def getCommentsByPostid(self,postid,userid): sqlText="select (select Count(*) from comment_like where comments.commentid = comment_like.commentid) as like,(select Count(*) from comment_like where comments.commentid = comment_like.commentid and comment_like.userid=%d) as flag,commentid,name,comment from users,comments where users.userid=comments.userid and postid=%d order by date desc;"%(userid,postid) result=sql.queryDB(self.conn,sqlText) return result;
def getCommentsByPostid(self,postid,userid): sqlText="select (select Count(*) from comment_like where \ comments.commentid = comment_like.commentid) as like,(select Count(*) \ from comment_like where comments.commentid = \ comment_like.commentid and comment_like.userid=%s) as \ flag,commentid,name,comment from users,comments where \ users.userid=comments.userid and postid=%s order by date desc;" params=[userid,postid] result=sql.queryDB(self.conn,sqlText,params) return result;
{ "deleted": [ { "line_no": 2, "char_start": 49, "char_end": 417, "line": " sqlText=\"select (select Count(*) from comment_like where comments.commentid = comment_like.commentid) as like,(select Count(*) from comment_like where comments.commentid = comment_like.commentid and comment_like.userid=%d) as flag,commentid,name,comment from users,comments where users.userid=comments.userid and postid=%d order by date desc;\"%(userid,postid)\n" }, { "line_no": 3, "char_start": 417, "char_end": 463, "line": " result=sql.queryDB(self.conn,sqlText)\n" } ], "added": [ { "line_no": 2, "char_start": 49, "char_end": 116, "line": " sqlText=\"select (select Count(*) from comment_like where \\\n" }, { "line_no": 3, "char_start": 116, "char_end": 196, "line": " comments.commentid = comment_like.commentid) as like,(select Count(*) \\\n" }, { "line_no": 4, "char_start": 196, "char_end": 259, "line": " from comment_like where comments.commentid = \\\n" }, { "line_no": 5, "char_start": 259, "char_end": 331, "line": " comment_like.commentid and comment_like.userid=%s) as \\\n" }, { "line_no": 6, "char_start": 331, "char_end": 403, "line": " flag,commentid,name,comment from users,comments where \\\n" }, { "line_no": 7, "char_start": 403, "char_end": 483, "line": " users.userid=comments.userid and postid=%s order by date desc;\"\n" }, { "line_no": 8, "char_start": 483, "char_end": 514, "line": " params=[userid,postid]\n" }, { "line_no": 9, "char_start": 514, "char_end": 567, "line": " result=sql.queryDB(self.conn,sqlText,params)\n" } ] }
{ "deleted": [ { "char_start": 277, "char_end": 278, "chars": "d" }, { "char_start": 378, "char_end": 379, "chars": "d" }, { "char_start": 400, "char_end": 402, "chars": "%(" }, { "char_start": 415, "char_end": 416, "chars": ")" } ], "added": [ { "char_start": 114, "char_end": 124, "chars": "\\\n " }, { "char_start": 194, "char_end": 212, "chars": "\\\n " }, { "char_start": 257, "char_end": 275, "chars": "\\\n " }, { "char_start": 323, "char_end": 324, "chars": "s" }, { "char_start": 329, "char_end": 347, "chars": "\\\n " }, { "char_start": 401, "char_end": 419, "chars": "\\\n " }, { "char_start": 460, "char_end": 461, "chars": "s" }, { "char_start": 482, "char_end": 499, "chars": "\n params=[" }, { "char_start": 512, "char_end": 513, "chars": "]" }, { "char_start": 558, "char_end": 565, "chars": ",params" } ] }
github.com/ShaominLi/Twitter_project/commit/5329d91f9e569c95184053c8e7ef596949c33ce9
modules/comment.py
cwe-089
__init__.callback
def callback(recipeName): menu.pack_forget() viewRecipeFrame.pack(expand=True, fill='both') groceryButton.pack_forget() database_file = "meal_planner.db" print(recipeName) with sqlite3.connect(database_file) as conn: cursor = conn.cursor() selection = cursor.execute("""SELECT * FROM recipe WHERE name = """ + "\"" + recipeName + "\"") for result in [selection]: for row in result.fetchall(): name = row[0] time = row[1] servings = row[2] ingredients = row[4] directions = row[5] string = ("Name: {} \n Cook time: {} \n Number of Servings: {} \n ".format(name, time, servings)) secondString = ("Ingredients: {}".format(ingredients)) thirdString = ("Directions: {}".format(directions)) Label(viewRecipeFrame, text=string, font=MEDIUM_FONT, bg="#f8f8f8", fg="#000000").pack(side=TOP) Label(viewRecipeFrame, text=secondString, font=MEDIUM_FONT, bg="#f8f8f8", fg="#000000").pack(side=TOP) Label(viewRecipeFrame, text=thirdString, font=MEDIUM_FONT, bg="#f8f8f8", fg="#000000").pack(side=TOP) returnButton = Button(menuFrame, text = "Return to Menu", highlightbackground="#e7e7e7", command=lambda: [viewRecipeFrame.pack_forget(), menu.pack(), returnButton.pack_forget(), label.configure(text="Meal Planer"), groceryButton.pack(side=RIGHT)]) returnButton.pack(side=RIGHT)
def callback(recipeName): menu.pack_forget() viewRecipeFrame.pack(expand=True, fill='both') groceryButton.pack_forget() database_file = "meal_planner.db" print(recipeName) with sqlite3.connect(database_file) as conn: cursor = conn.cursor() selection = cursor.execute("""SELECT * FROM recipe WHERE name = ?;""", (recipeName, )) for result in [selection]: for row in result.fetchall(): name = row[0] time = row[1] servings = row[2] ingredients = row[4] directions = row[5] string = ("Name: {} \n Cook time: {} \n Number of Servings: {} \n ".format(name, time, servings)) secondString = ("Ingredients: {}".format(ingredients)) thirdString = ("Directions: {}".format(directions)) Label(viewRecipeFrame, text=string, font=MEDIUM_FONT, bg="#f8f8f8", fg="#000000").pack(side=TOP) Label(viewRecipeFrame, text=secondString, font=MEDIUM_FONT, bg="#f8f8f8", fg="#000000").pack(side=TOP) Label(viewRecipeFrame, text=thirdString, font=MEDIUM_FONT, bg="#f8f8f8", fg="#000000").pack(side=TOP) returnButton = Button(menuFrame, text = "Return to Menu", highlightbackground="#e7e7e7", command=lambda: [viewRecipeFrame.pack_forget(), menu.pack(), returnButton.pack_forget(), label.configure(text="Meal Planer"), groceryButton.pack(side=RIGHT)]) returnButton.pack(side=RIGHT)
{ "deleted": [ { "line_no": 9, "char_start": 336, "char_end": 448, "line": " selection = cursor.execute(\"\"\"SELECT * FROM recipe WHERE name = \"\"\" + \"\\\"\" + recipeName + \"\\\"\")\n" } ], "added": [ { "line_no": 9, "char_start": 336, "char_end": 439, "line": " selection = cursor.execute(\"\"\"SELECT * FROM recipe WHERE name = ?;\"\"\", (recipeName, ))\n" } ] }
{ "deleted": [ { "char_start": 419, "char_end": 421, "chars": " +" }, { "char_start": 422, "char_end": 429, "chars": "\"\\\"\" + " }, { "char_start": 439, "char_end": 441, "chars": " +" }, { "char_start": 442, "char_end": 446, "chars": "\"\\\"\"" } ], "added": [ { "char_start": 416, "char_end": 418, "chars": "?;" }, { "char_start": 421, "char_end": 422, "chars": "," }, { "char_start": 423, "char_end": 424, "chars": "(" }, { "char_start": 434, "char_end": 435, "chars": "," }, { "char_start": 436, "char_end": 437, "chars": ")" } ] }
github.com/trishamoyer/RecipePlanner-Python/commit/44d2ce370715d9344fad34b3b749322ab095a925
mealPlan.py
cwe-089
patch
@jwt_required def patch(self, user_id): """ Replaces information of corresponding user_id with request body """ query = f"""update users set user_id = %s """ query += f"""where user_id = '{user_id}'""" json_data = request.get_json() parameters = (json_data['user_id'], ) database_utilities.execute_query(query, parameters)
@jwt_required def patch(self, user_id): """ Replaces information of corresponding user_id with request body """ query = f"""update users set user_id = %s """ query += f"""where user_id = %s""" json_data = request.get_json() parameters = (json_data['user_id'], user_id) database_utilities.execute_query(query, parameters)
{ "deleted": [ { "line_no": 5, "char_start": 182, "char_end": 234, "line": " query += f\"\"\"where user_id = '{user_id}'\"\"\"\n" }, { "line_no": 7, "char_start": 273, "char_end": 319, "line": " parameters = (json_data['user_id'], )\n" } ], "added": [ { "line_no": 5, "char_start": 182, "char_end": 225, "line": " query += f\"\"\"where user_id = %s\"\"\"\n" }, { "line_no": 7, "char_start": 264, "char_end": 317, "line": " parameters = (json_data['user_id'], user_id)\n" } ] }
{ "deleted": [ { "char_start": 219, "char_end": 222, "chars": "'{u" }, { "char_start": 223, "char_end": 230, "chars": "er_id}'" } ], "added": [ { "char_start": 219, "char_end": 220, "chars": "%" }, { "char_start": 308, "char_end": 315, "chars": "user_id" } ] }
github.com/sgosal2/tiger-boards-backend/commit/4670109dd613df2f2fe7e8403ebd149df2b55485
apis/users.py
cwe-089
edit
@mod.route('/edit', methods=['GET', 'POST']) def edit(): sql = "SELECT * FROM users where email = '%s';" % (session['logged_email']) cursor.execute(sql) u = cursor.fetchone() if request.method == 'POST': sql = "UPDATE users SET nickname = '%s' where email = '%s'" \ % (request.form['nickname'], session['logged_email']) cursor.execute(sql) sql = "SELECT * FROM users where email = '%s';" \ % (session['logged_email']) cursor.execute(sql) u = cursor.fetchone() conn.commit() flash('Edit Nickname Success!') return render_template('users/edit.html', u=u)
@mod.route('/edit', methods=['GET', 'POST']) def edit(): cursor.execute("SELECT * FROM users where email = %s;", (session['logged_email'],)) u = cursor.fetchone() if request.method == 'POST': cursor.execute("UPDATE users SET nickname = %s where email = %s", (request.form['nickname'], session['logged_email'])) cursor.execute("SELECT * FROM users where email = %s;", (session['logged_email'],)) u = cursor.fetchone() conn.commit() flash('Edit Nickname Success!') return render_template('users/edit.html', u=u)
{ "deleted": [ { "line_no": 3, "char_start": 57, "char_end": 137, "line": " sql = \"SELECT * FROM users where email = '%s';\" % (session['logged_email'])\n" }, { "line_no": 4, "char_start": 137, "char_end": 161, "line": " cursor.execute(sql)\n" }, { "line_no": 7, "char_start": 220, "char_end": 290, "line": " sql = \"UPDATE users SET nickname = '%s' where email = '%s'\" \\\n" }, { "line_no": 8, "char_start": 290, "char_end": 352, "line": " % (request.form['nickname'], session['logged_email'])\n" }, { "line_no": 9, "char_start": 352, "char_end": 380, "line": " cursor.execute(sql)\n" }, { "line_no": 10, "char_start": 380, "char_end": 438, "line": " sql = \"SELECT * FROM users where email = '%s';\" \\\n" }, { "line_no": 11, "char_start": 438, "char_end": 478, "line": " % (session['logged_email'])\n" }, { "line_no": 12, "char_start": 478, "char_end": 506, "line": " cursor.execute(sql)\n" } ], "added": [ { "line_no": 3, "char_start": 57, "char_end": 145, "line": " cursor.execute(\"SELECT * FROM users where email = %s;\", (session['logged_email'],))\n" }, { "line_no": 6, "char_start": 204, "char_end": 331, "line": " cursor.execute(\"UPDATE users SET nickname = %s where email = %s\", (request.form['nickname'], session['logged_email']))\n" }, { "line_no": 7, "char_start": 331, "char_end": 423, "line": " cursor.execute(\"SELECT * FROM users where email = %s;\", (session['logged_email'],))\n" } ] }
{ "deleted": [ { "char_start": 62, "char_end": 67, "chars": "ql = " }, { "char_start": 102, "char_end": 103, "chars": "'" }, { "char_start": 105, "char_end": 106, "chars": "'" }, { "char_start": 108, "char_end": 110, "chars": " %" }, { "char_start": 136, "char_end": 159, "chars": "\n cursor.execute(sql" }, { "char_start": 229, "char_end": 234, "chars": "ql = " }, { "char_start": 263, "char_end": 264, "chars": "'" }, { "char_start": 266, "char_end": 267, "chars": "'" }, { "char_start": 282, "char_end": 283, "chars": "'" }, { "char_start": 285, "char_end": 286, "chars": "'" }, { "char_start": 287, "char_end": 299, "chars": " \\\n %" }, { "char_start": 375, "char_end": 394, "chars": "sql)\n sql = " }, { "char_start": 429, "char_end": 430, "chars": "'" }, { "char_start": 432, "char_end": 433, "chars": "'" }, { "char_start": 435, "char_end": 451, "chars": " \\\n %" }, { "char_start": 477, "char_end": 504, "chars": "\n cursor.execute(sql" } ], "added": [ { "char_start": 61, "char_end": 64, "chars": "cur" }, { "char_start": 65, "char_end": 76, "chars": "or.execute(" }, { "char_start": 115, "char_end": 116, "chars": "," }, { "char_start": 141, "char_end": 142, "chars": "," }, { "char_start": 212, "char_end": 215, "chars": "cur" }, { "char_start": 216, "char_end": 227, "chars": "or.execute(" }, { "char_start": 276, "char_end": 277, "chars": "," }, { "char_start": 329, "char_end": 330, "chars": ")" }, { "char_start": 393, "char_end": 394, "chars": "," }, { "char_start": 419, "char_end": 420, "chars": "," } ] }
github.com/ulyssetsd/bjtu-sql/commit/17d7b21864b72ba5666f15236474a93268b32ec9
flaskr/flaskr/views/users.py
cwe-089
shame_add
def shame_add(name): shame = shame_ask(name) db = db_connect() cursor = db.cursor() if shame is None: try: cursor.execute(''' INSERT INTO people(name,karma,shame) VALUES('{}',0,1) '''.format(name)) db.commit() logger.debug('Inserted into karmadb 1 shame for {}'.format(name)) db.close() return 1 except Exception as e: logger.error('Execution failed with error: {}'.format(e)) raise else: shame = shame + 1 try: cursor.execute(''' UPDATE people SET shame = {0} WHERE name = '{1}' '''.format(shame, name)) db.commit() logger.debug('Inserted into karmadb {} shame for {}'.format( shame, name)) db.close() return shame except Exception as e: logger.error('Execution failed with error: {}'.format(e)) raise
def shame_add(name): shame = shame_ask(name) db = db_connect() cursor = db.cursor() if shame is None: try: cursor.execute(''' INSERT INTO people(name,karma,shame) VALUES(%(name)s,0,1) ''', (name, )) db.commit() logger.debug('Inserted into karmadb 1 shame for {}'.format(name)) db.close() return 1 except Exception as e: logger.error('Execution failed with error: {}'.format(e)) raise else: shame = shame + 1 try: cursor.execute(''' UPDATE people SET shame = %(karma)s WHERE name = %(name)s ''' ( shame, name, )) db.commit() logger.debug('Inserted into karmadb {} shame for {}'.format( shame, name)) db.close() return shame except Exception as e: logger.error('Execution failed with error: {}'.format(e)) raise
{ "deleted": [ { "line_no": 8, "char_start": 162, "char_end": 232, "line": " INSERT INTO people(name,karma,shame) VALUES('{}',0,1)\n" }, { "line_no": 9, "char_start": 232, "char_end": 266, "line": " '''.format(name))\n" }, { "line_no": 22, "char_start": 612, "char_end": 677, "line": " UPDATE people SET shame = {0} WHERE name = '{1}'\n" }, { "line_no": 23, "char_start": 677, "char_end": 718, "line": " '''.format(shame, name))\n" } ], "added": [ { "line_no": 8, "char_start": 162, "char_end": 236, "line": " INSERT INTO people(name,karma,shame) VALUES(%(name)s,0,1)\n" }, { "line_no": 9, "char_start": 236, "char_end": 267, "line": " ''', (name, ))\n" }, { "line_no": 22, "char_start": 613, "char_end": 687, "line": " UPDATE people SET shame = %(karma)s WHERE name = %(name)s\n" }, { "line_no": 23, "char_start": 687, "char_end": 709, "line": " ''' (\n" }, { "line_no": 24, "char_start": 709, "char_end": 732, "line": " shame,\n" }, { "line_no": 25, "char_start": 732, "char_end": 754, "line": " name,\n" }, { "line_no": 26, "char_start": 754, "char_end": 769, "line": " ))\n" } ] }
{ "deleted": [ { "char_start": 222, "char_end": 226, "chars": "'{}'" }, { "char_start": 251, "char_end": 258, "chars": ".format" }, { "char_start": 654, "char_end": 657, "chars": "{0}" }, { "char_start": 671, "char_end": 676, "chars": "'{1}'" }, { "char_start": 696, "char_end": 703, "chars": ".format" } ], "added": [ { "char_start": 222, "char_end": 230, "chars": "%(name)s" }, { "char_start": 255, "char_end": 257, "chars": ", " }, { "char_start": 262, "char_end": 264, "chars": ", " }, { "char_start": 655, "char_end": 664, "chars": "%(karma)s" }, { "char_start": 678, "char_end": 686, "chars": "%(name)s" }, { "char_start": 706, "char_end": 707, "chars": " " }, { "char_start": 708, "char_end": 725, "chars": "\n " }, { "char_start": 731, "char_end": 747, "chars": "\n " }, { "char_start": 752, "char_end": 766, "chars": ",\n " } ] }
github.com/tylarb/KarmaBoi-PCF/commit/c1d00a27d7f6b7eb6f15a3dacd4269654a32c10a
KarmaBoi/dbopts.py
cwe-089
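shame_add above switches to named %(name)s placeholders, psycopg2's pyformat style. The sketch below illustrates named-parameter binding in general and is not the record's exact code; it assumes an invented people table and uses sqlite3's equivalent :name syntax with a dict of values so it runs with the standard library alone.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE people (name TEXT, karma INTEGER, shame INTEGER)")

def shame_add(name):
    # named placeholders are bound from a mapping keyed by placeholder name
    conn.execute(
        "INSERT INTO people (name, karma, shame) VALUES (:name, 0, 1)",
        {"name": name},
    )
    conn.commit()

shame_add("alice")
print(conn.execute("SELECT name, shame FROM people").fetchone())  # -> ('alice', 1)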
can_user_pass_that_amount_of_money
def can_user_pass_that_amount_of_money(self, user_id, money): self.cursor.execute("SELECT count(id) FROM kickstarter.users where id = %s and money >= %s" % (user_id, money)) return self.cursor.fetchall()[0][0]
def can_user_pass_that_amount_of_money(self, user_id, money): self.cursor.execute("SELECT count(id) FROM kickstarter.users where id = %s and money >= %s", (user_id, money)) return self.cursor.fetchall()[0][0]
{ "deleted": [ { "line_no": 2, "char_start": 66, "char_end": 186, "line": " self.cursor.execute(\"SELECT count(id) FROM kickstarter.users where id = %s and money >= %s\" % (user_id, money))\n" } ], "added": [ { "line_no": 2, "char_start": 66, "char_end": 185, "line": " self.cursor.execute(\"SELECT count(id) FROM kickstarter.users where id = %s and money >= %s\", (user_id, money))\n" } ] }
{ "deleted": [ { "char_start": 165, "char_end": 167, "chars": " %" } ], "added": [ { "char_start": 165, "char_end": 166, "chars": "," } ] }
github.com/JLucka/kickstarter-dev/commit/e2ffa062697e060fdfbd2eccbb89a8c53a569e0b
backend/transactions/TransactionConnector.py
cwe-089
placings
@endpoints.route("/placings") def placings(): if db == None: init() tag = request.args.get('tag', default='christmas mike') # Get all the urls that this player has participated in sql = "SELECT * FROM placings WHERE player = '{}'".format(tag) results = list(db.exec(sql)) results.sort(key=lambda x: int(x[2])) return json.dumps(results)
@endpoints.route("/placings") def placings(): if db == None: init() tag = request.args.get('tag', default='christmas mike') # Get all the urls that this player has participated in sql = "SELECT * FROM placings WHERE player = '{tag}'" args = {'tag': tag} results = list(db.exec(sql, args)) results.sort(key=lambda x: int(x[2])) return json.dumps(results)
{ "deleted": [ { "line_no": 9, "char_start": 202, "char_end": 269, "line": " sql = \"SELECT * FROM placings WHERE player = '{}'\".format(tag)\n" }, { "line_no": 10, "char_start": 269, "char_end": 302, "line": " results = list(db.exec(sql))\n" } ], "added": [ { "line_no": 9, "char_start": 202, "char_end": 260, "line": " sql = \"SELECT * FROM placings WHERE player = '{tag}'\"\n" }, { "line_no": 10, "char_start": 260, "char_end": 284, "line": " args = {'tag': tag}\n" }, { "line_no": 11, "char_start": 284, "char_end": 323, "line": " results = list(db.exec(sql, args))\n" } ] }
{ "deleted": [ { "char_start": 256, "char_end": 259, "chars": ".fo" }, { "char_start": 260, "char_end": 261, "chars": "m" }, { "char_start": 262, "char_end": 264, "chars": "t(" }, { "char_start": 267, "char_end": 268, "chars": ")" } ], "added": [ { "char_start": 253, "char_end": 256, "chars": "tag" }, { "char_start": 259, "char_end": 265, "chars": "\n a" }, { "char_start": 266, "char_end": 273, "chars": "gs = {'" }, { "char_start": 274, "char_end": 279, "chars": "ag': " }, { "char_start": 282, "char_end": 283, "chars": "}" }, { "char_start": 314, "char_end": 320, "chars": ", args" } ] }
github.com/DKelle/Smash_stats/commit/4bb83f3f6ce7d6bebbeb512cd015f9e72cf36d63
endpoints.py
cwe-089
markTokenUsedExternal
def markTokenUsedExternal(token, optStr=""): conn, c = connectDB() req = "UPDATE {} SET \"options_selected\"='{}' WHERE token='{}'".format(CFG("tokens_table_name"), \ optStr, token) c.execute(req) closeDB(conn)
def markTokenUsedExternal(token, optStr=""): conn, c = connectDB() req = "UPDATE {} SET \"options_selected\"=? WHERE token=?".format(CFG("tokens_table_name")) c.execute(req, (optStr, token,)) closeDB(conn)
{ "deleted": [ { "line_no": 3, "char_start": 71, "char_end": 175, "line": " req = \"UPDATE {} SET \\\"options_selected\\\"='{}' WHERE token='{}'\".format(CFG(\"tokens_table_name\"), \\\n" }, { "line_no": 4, "char_start": 175, "char_end": 210, "line": " optStr, token)\n" }, { "line_no": 5, "char_start": 210, "char_end": 229, "line": " c.execute(req)\n" } ], "added": [ { "line_no": 3, "char_start": 71, "char_end": 167, "line": " req = \"UPDATE {} SET \\\"options_selected\\\"=? WHERE token=?\".format(CFG(\"tokens_table_name\"))\n" }, { "line_no": 4, "char_start": 167, "char_end": 204, "line": " c.execute(req, (optStr, token,))\n" } ] }
{ "deleted": [ { "char_start": 117, "char_end": 121, "chars": "'{}'" }, { "char_start": 134, "char_end": 138, "chars": "'{}'" }, { "char_start": 171, "char_end": 174, "chars": ", \\" }, { "char_start": 175, "char_end": 177, "chars": " " }, { "char_start": 181, "char_end": 182, "chars": " " }, { "char_start": 183, "char_end": 195, "chars": " " }, { "char_start": 209, "char_end": 227, "chars": "\n c.execute(req" } ], "added": [ { "char_start": 117, "char_end": 118, "chars": "?" }, { "char_start": 131, "char_end": 132, "chars": "?" }, { "char_start": 165, "char_end": 166, "chars": ")" }, { "char_start": 171, "char_end": 185, "chars": "c.execute(req," }, { "char_start": 186, "char_end": 187, "chars": "(" }, { "char_start": 200, "char_end": 201, "chars": "," } ] }
github.com/FAUSheppy/simple-python-poll/commit/186c5ff5cdf58272e253a1bb432419ee50d93109
database.py
cwe-089
get_roster
def get_roster(self, server_id): sql = """SELECT username, role FROM roles WHERE roles.server_id = {0}; """.format(server_id) self.cur.execute(sql) return self.cur.fetchall()
def get_roster(self, server_id): sql = """ SELECT username, role FROM roles WHERE roles.server_id = %s; """ self.cur.execute(sql, (server_id,)) return self.cur.fetchall()
{ "deleted": [ { "line_no": 2, "char_start": 37, "char_end": 76, "line": " sql = \"\"\"SELECT username, role\n" }, { "line_no": 3, "char_start": 76, "char_end": 104, "line": " FROM roles\n" }, { "line_no": 4, "char_start": 104, "char_end": 150, "line": " WHERE roles.server_id = {0};\n" }, { "line_no": 5, "char_start": 150, "char_end": 189, "line": " \"\"\".format(server_id)\n" }, { "line_no": 6, "char_start": 189, "char_end": 219, "line": " self.cur.execute(sql)\n" } ], "added": [ { "line_no": 2, "char_start": 37, "char_end": 55, "line": " sql = \"\"\"\n" }, { "line_no": 3, "char_start": 55, "char_end": 91, "line": " SELECT username, role\n" }, { "line_no": 4, "char_start": 91, "char_end": 116, "line": " FROM roles\n" }, { "line_no": 5, "char_start": 116, "char_end": 158, "line": " WHERE roles.server_id = %s;\n" }, { "line_no": 6, "char_start": 158, "char_end": 176, "line": " \"\"\"\n" }, { "line_no": 7, "char_start": 176, "char_end": 220, "line": " self.cur.execute(sql, (server_id,))\n" } ] }
{ "deleted": [ { "char_start": 76, "char_end": 77, "chars": " " }, { "char_start": 91, "char_end": 93, "chars": " " }, { "char_start": 104, "char_end": 107, "chars": " " }, { "char_start": 145, "char_end": 148, "chars": "{0}" }, { "char_start": 150, "char_end": 153, "chars": " " }, { "char_start": 170, "char_end": 188, "chars": ".format(server_id)" } ], "added": [ { "char_start": 54, "char_end": 69, "chars": "\n " }, { "char_start": 154, "char_end": 156, "chars": "%s" }, { "char_start": 204, "char_end": 218, "chars": ", (server_id,)" } ] }
github.com/jgayfer/spirit/commit/01c846c534c8d3cf6763f8b7444a0efe2caa3799
db/dbase.py
cwe-089
retrieve_video
def retrieve_video(id, playlist_id, db): db.execute("SELECT id, position from video WHERE id={id} and playlist_id={playlist_id};".format( id=id, playlist_id=playlist_id)) row = db.fetchone() return row
def retrieve_video(id, playlist_id, db): db.execute( "SELECT id, position from video WHERE id=%s and playlist_id=%s;", (id, playlist_id)) row = db.fetchone() return row
{ "deleted": [ { "line_no": 2, "char_start": 41, "char_end": 142, "line": " db.execute(\"SELECT id, position from video WHERE id={id} and playlist_id={playlist_id};\".format(\n" }, { "line_no": 3, "char_start": 142, "char_end": 183, "line": " id=id, playlist_id=playlist_id))\n" } ], "added": [ { "line_no": 2, "char_start": 41, "char_end": 57, "line": " db.execute(\n" }, { "line_no": 3, "char_start": 57, "char_end": 150, "line": " \"SELECT id, position from video WHERE id=%s and playlist_id=%s;\", (id, playlist_id))\n" } ] }
{ "deleted": [ { "char_start": 97, "char_end": 101, "chars": "{id}" }, { "char_start": 118, "char_end": 125, "chars": "{playli" }, { "char_start": 126, "char_end": 131, "chars": "t_id}" }, { "char_start": 133, "char_end": 140, "chars": ".format" }, { "char_start": 141, "char_end": 153, "chars": "\n id=" }, { "char_start": 157, "char_end": 169, "chars": "playlist_id=" } ], "added": [ { "char_start": 56, "char_end": 65, "chars": "\n " }, { "char_start": 106, "char_end": 108, "chars": "%s" }, { "char_start": 125, "char_end": 126, "chars": "%" }, { "char_start": 129, "char_end": 130, "chars": "," }, { "char_start": 131, "char_end": 132, "chars": "(" } ] }
github.com/Madmous/playlist/commit/666e52c5f0b8c1f4296e84471637033d9542a7a6
video/video_repository.py
cwe-089
followFriends
def followFriends(self,userid,friendid): sqlText="insert into friends values(%d,%d);"%(friendid,userid) result=sql.insertDB(self.conn,sqlText) return result;
def followFriends(self,userid,friendid): sqlText="insert into friends values(%s,%s);" params=[friendid,userid] result=sql.insertDB(self.conn,sqlText,params) return result;
{ "deleted": [ { "line_no": 2, "char_start": 45, "char_end": 116, "line": " sqlText=\"insert into friends values(%d,%d);\"%(friendid,userid)\n" }, { "line_no": 3, "char_start": 116, "char_end": 163, "line": " result=sql.insertDB(self.conn,sqlText)\n" } ], "added": [ { "line_no": 2, "char_start": 45, "char_end": 98, "line": " sqlText=\"insert into friends values(%s,%s);\"\n" }, { "line_no": 3, "char_start": 98, "char_end": 131, "line": " params=[friendid,userid]\n" }, { "line_no": 4, "char_start": 131, "char_end": 185, "line": " result=sql.insertDB(self.conn,sqlText,params)\n" } ] }
{ "deleted": [ { "char_start": 90, "char_end": 91, "chars": "d" }, { "char_start": 93, "char_end": 94, "chars": "d" }, { "char_start": 97, "char_end": 99, "chars": "%(" }, { "char_start": 114, "char_end": 115, "chars": ")" } ], "added": [ { "char_start": 90, "char_end": 91, "chars": "s" }, { "char_start": 93, "char_end": 94, "chars": "s" }, { "char_start": 97, "char_end": 114, "chars": "\n params=[" }, { "char_start": 129, "char_end": 130, "chars": "]" }, { "char_start": 176, "char_end": 183, "chars": ",params" } ] }
github.com/ShaominLi/Twitter_project/commit/5329d91f9e569c95184053c8e7ef596949c33ce9
modules/users.py
cwe-089
getGameID
def getGameID(ID): db.execute("SELECT * FROM games WHERE ID = %i" % ID) ID = db.fetchone() return ID
def getGameID(ID): db.execute("SELECT * FROM games WHERE ID = ?", ID) ID = db.fetchone() return ID
{ "deleted": [ { "line_no": 2, "char_start": 19, "char_end": 73, "line": "\tdb.execute(\"SELECT * FROM games WHERE ID = %i\" % ID)\n" } ], "added": [ { "line_no": 2, "char_start": 19, "char_end": 71, "line": "\tdb.execute(\"SELECT * FROM games WHERE ID = ?\", ID)\n" } ] }
{ "deleted": [ { "char_start": 63, "char_end": 65, "chars": "%i" }, { "char_start": 66, "char_end": 68, "chars": " %" } ], "added": [ { "char_start": 63, "char_end": 64, "chars": "?" }, { "char_start": 65, "char_end": 66, "chars": "," } ] }
github.com/iScrE4m/XLeague/commit/59cab6e5fd8bd5e47f2418a7c71cb1d4e3cad0d2
plugins/database.py
cwe-089
top_proxies
@app.route('/top_proxies') def top_proxies(): con = psycopg2.connect(**config.POSTGRES) cur = con.cursor() query = "SELECT sum(amount) FROM holders" cur.execute(query) total = cur.fetchone() total_votes = total[0] query = "SELECT voting_as FROM holders WHERE voting_as<>'1.2.5' group by voting_as" cur.execute(query) results = cur.fetchall() #con.close() proxies = [] for p in range(0, len(results)): proxy_line = [0] * 5 proxy_id = results[p][0] proxy_line[0] = proxy_id query = "SELECT account_name, amount FROM holders WHERE account_id='"+proxy_id+"' LIMIT 1" cur.execute(query) proxy = cur.fetchone() try: proxy_name = proxy[0] proxy_amount = proxy[1] except: proxy_name = "unknown" proxy_amount = 0 proxy_line[1] = proxy_name query = "SELECT amount, account_id FROM holders WHERE voting_as='"+proxy_id+"'" cur.execute(query) results2 = cur.fetchall() proxy_line[2] = int(proxy_amount) for p2 in range(0, len(results2)): amount = results2[p2][0] account_id = results2[p2][1] proxy_line[2] = proxy_line[2] + int(amount) # total proxy votes proxy_line[3] = proxy_line[3] + 1 # followers if proxy_line[3] > 2: percentage = float(float(proxy_line[2]) * 100.0/ float(total_votes)) proxy_line[4] = percentage proxies.append(proxy_line) con.close() proxies = sorted(proxies, key=lambda k: int(k[2])) r_proxies = proxies[::-1] return jsonify(filter(None, r_proxies))
@app.route('/top_proxies') def top_proxies(): con = psycopg2.connect(**config.POSTGRES) cur = con.cursor() query = "SELECT sum(amount) FROM holders" cur.execute(query) total = cur.fetchone() total_votes = total[0] query = "SELECT voting_as FROM holders WHERE voting_as<>'1.2.5' group by voting_as" cur.execute(query) results = cur.fetchall() #con.close() proxies = [] for p in range(0, len(results)): proxy_line = [0] * 5 proxy_id = results[p][0] proxy_line[0] = proxy_id query = "SELECT account_name, amount FROM holders WHERE account_id=%s LIMIT 1" cur.execute(query, (proxy_id,)) proxy = cur.fetchone() try: proxy_name = proxy[0] proxy_amount = proxy[1] except: proxy_name = "unknown" proxy_amount = 0 proxy_line[1] = proxy_name query = "SELECT amount, account_id FROM holders WHERE voting_as=%s" cur.execute(query, (proxy_id,)) results2 = cur.fetchall() proxy_line[2] = int(proxy_amount) for p2 in range(0, len(results2)): amount = results2[p2][0] account_id = results2[p2][1] proxy_line[2] = proxy_line[2] + int(amount) # total proxy votes proxy_line[3] = proxy_line[3] + 1 # followers if proxy_line[3] > 2: percentage = float(float(proxy_line[2]) * 100.0/ float(total_votes)) proxy_line[4] = percentage proxies.append(proxy_line) con.close() proxies = sorted(proxies, key=lambda k: int(k[2])) r_proxies = proxies[::-1] return jsonify(filter(None, r_proxies))
{ "deleted": [ { "line_no": 25, "char_start": 551, "char_end": 650, "line": " query = \"SELECT account_name, amount FROM holders WHERE account_id='\"+proxy_id+\"' LIMIT 1\"\n" }, { "line_no": 26, "char_start": 650, "char_end": 677, "line": " cur.execute(query)\n" }, { "line_no": 39, "char_start": 910, "char_end": 998, "line": " query = \"SELECT amount, account_id FROM holders WHERE voting_as='\"+proxy_id+\"'\"\n" }, { "line_no": 40, "char_start": 998, "char_end": 1025, "line": " cur.execute(query)\n" } ], "added": [ { "line_no": 25, "char_start": 551, "char_end": 638, "line": " query = \"SELECT account_name, amount FROM holders WHERE account_id=%s LIMIT 1\"\n" }, { "line_no": 26, "char_start": 638, "char_end": 678, "line": " cur.execute(query, (proxy_id,))\n" }, { "line_no": 39, "char_start": 911, "char_end": 987, "line": " query = \"SELECT amount, account_id FROM holders WHERE voting_as=%s\"\n" }, { "line_no": 40, "char_start": 987, "char_end": 1027, "line": " cur.execute(query, (proxy_id,))\n" } ] }
{ "deleted": [ { "char_start": 626, "char_end": 640, "chars": "'\"+proxy_id+\"'" }, { "char_start": 982, "char_end": 996, "chars": "'\"+proxy_id+\"'" } ], "added": [ { "char_start": 626, "char_end": 628, "chars": "%s" }, { "char_start": 663, "char_end": 676, "chars": ", (proxy_id,)" }, { "char_start": 983, "char_end": 985, "chars": "%s" }, { "char_start": 1012, "char_end": 1025, "chars": ", (proxy_id,)" } ] }
github.com/VinChain/vinchain-python-api-backend/commit/b78088a551fbb712121269c6eb7f43ede120ff60
api.py
cwe-089
userLogin
def userLogin(self): sqlName="select count(*) from users where name='%s' and \ password='%s';"%(self.name,self.password) checkName=sql.queryDB(self.conn,sqlName) result=checkName[0][0] if result == 0: self.clean() return False else: return True
def userLogin(self): sqlName="select count(*) from users where name=%s and password=%s;" params = [self.name,self.password] checkName=sql.queryDB(self.conn,sqlName,params) result=checkName[0][0] if result == 0: self.clean() return False else: return True
{ "deleted": [ { "line_no": 3, "char_start": 26, "char_end": 92, "line": " sqlName=\"select count(*) from users where name='%s' and \\\n" }, { "line_no": 4, "char_start": 92, "char_end": 150, "line": " password='%s';\"%(self.name,self.password)\n" }, { "line_no": 5, "char_start": 150, "char_end": 199, "line": " checkName=sql.queryDB(self.conn,sqlName)\n" }, { "line_no": 6, "char_start": 199, "char_end": 200, "line": "\n" } ], "added": [ { "line_no": 3, "char_start": 26, "char_end": 102, "line": " sqlName=\"select count(*) from users where name=%s and password=%s;\"\n" }, { "line_no": 4, "char_start": 102, "char_end": 145, "line": " params = [self.name,self.password]\n" }, { "line_no": 5, "char_start": 145, "char_end": 201, "line": " checkName=sql.queryDB(self.conn,sqlName,params)\n" } ] }
{ "deleted": [ { "char_start": 81, "char_end": 82, "chars": "'" }, { "char_start": 84, "char_end": 85, "chars": "'" }, { "char_start": 90, "char_end": 91, "chars": "\\" }, { "char_start": 92, "char_end": 100, "chars": " " }, { "char_start": 111, "char_end": 116, "chars": "sword" }, { "char_start": 117, "char_end": 125, "chars": "'%s';\"%(" }, { "char_start": 148, "char_end": 149, "chars": ")" }, { "char_start": 198, "char_end": 199, "chars": "\n" } ], "added": [ { "char_start": 101, "char_end": 120, "chars": "\n params = [" }, { "char_start": 143, "char_end": 144, "chars": "]" }, { "char_start": 192, "char_end": 199, "chars": ",params" } ] }
github.com/ShaominLi/Twitter_project/commit/5329d91f9e569c95184053c8e7ef596949c33ce9
modules/users.py
cwe-089
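Several fixes in this section, userLogin among them, gather the values into a params list and pass it alongside the placeholder string to project-specific helpers such as sql.queryDB. Per DB-API, execute() accepts any sequence, so a list binds the same way a tuple does. The sketch below is an illustration under that assumption, with an invented users table and plain sqlite3 ? placeholders standing in for those helpers.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (name TEXT, password TEXT)")
conn.execute("INSERT INTO users VALUES (?, ?)", ["alice", "secret"])

def user_login(name, password):
    params = [name, password]  # a list is a valid DB-API parameter sequence
    row = conn.execute(
        "SELECT count(*) FROM users WHERE name = ? AND password = ?", params
    ).fetchone()
    return row[0] > 0

print(user_login("alice", "secret"))       # -> True
print(user_login("alice", "' OR '1'='1"))  # -> False: the quoted input is bound as data, not SQL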
search_pages
@app.route("/search", methods = ["POST"]) def search_pages(): search = request.form.get("search") page = db.query("select title from page where title = '%s'" % search).namedresult() if len(page) == 0: return redirect("/%s" % search) else: return place_holder(search)
@app.route("/search", methods = ["POST"]) def search_pages(): search = request.form.get("search") page = db.query("select title from page where title = $1", search).namedresult() if len(page) == 0: return redirect("/%s" % search) else: return place_holder(search)
{ "deleted": [ { "line_no": 4, "char_start": 102, "char_end": 190, "line": " page = db.query(\"select title from page where title = '%s'\" % search).namedresult()\n" } ], "added": [ { "line_no": 4, "char_start": 102, "char_end": 187, "line": " page = db.query(\"select title from page where title = $1\", search).namedresult()\n" } ] }
{ "deleted": [ { "char_start": 160, "char_end": 164, "chars": "'%s'" }, { "char_start": 165, "char_end": 167, "chars": " %" } ], "added": [ { "char_start": 160, "char_end": 162, "chars": "$1" }, { "char_start": 163, "char_end": 164, "chars": "," } ] }
github.com/jcortes0309/wiki_flask/commit/a6bf5316abe2eb528adf36c8241a013fd02c5ffa
server.py
cwe-089
get_articles_by_subject
def get_articles_by_subject(subject): with conn.cursor(cursor_factory=DictCursor) as cur: query = "SELECT * FROM articles WHERE subject='" + subject + "' ORDER BY last_submitted DESC" cur.execute(query) articles = cur.fetchall() return articles
def get_articles_by_subject(subject): with conn.cursor(cursor_factory=DictCursor) as cur: query = "SELECT * FROM articles WHERE subject=%s ORDER BY last_submitted DESC" cur.execute(query, (subject,)) articles = cur.fetchall() return articles
{ "deleted": [ { "line_no": 3, "char_start": 94, "char_end": 196, "line": " query = \"SELECT * FROM articles WHERE subject='\" + subject + \"' ORDER BY last_submitted DESC\"\n" }, { "line_no": 4, "char_start": 196, "char_end": 223, "line": " cur.execute(query)\n" } ], "added": [ { "line_no": 3, "char_start": 94, "char_end": 181, "line": " query = \"SELECT * FROM articles WHERE subject=%s ORDER BY last_submitted DESC\"\n" }, { "line_no": 4, "char_start": 181, "char_end": 220, "line": " cur.execute(query, (subject,))\n" } ] }
{ "deleted": [ { "char_start": 148, "char_end": 153, "chars": "'\" + " }, { "char_start": 154, "char_end": 165, "chars": "ubject + \"'" } ], "added": [ { "char_start": 148, "char_end": 149, "chars": "%" }, { "char_start": 206, "char_end": 218, "chars": ", (subject,)" } ] }
github.com/sepehr125/arxiv-doc2vec-recommender/commit/f23a4c32e6192b145017f64734b0a9a384c9123a
app.py
cwe-089
getQueue
def getQueue(self, numberOfLinks=10): self.cursor.execute("SELECT url FROM queue WHERE visited = '0' LIMIT {};".format(numberOfLinks)) result = self.cursor.fetchall() self.remove(result) return result
def getQueue(self, numberOfLinks=10): self.cursor.execute("SELECT url FROM queue WHERE visited = '0' LIMIT ?;", numberOfLinks) result = self.cursor.fetchall() self.remove(result) return result
{ "deleted": [ { "line_no": 2, "char_start": 42, "char_end": 147, "line": " self.cursor.execute(\"SELECT url FROM queue WHERE visited = '0' LIMIT {};\".format(numberOfLinks))\n" } ], "added": [ { "line_no": 2, "char_start": 42, "char_end": 139, "line": " self.cursor.execute(\"SELECT url FROM queue WHERE visited = '0' LIMIT ?;\", numberOfLinks)\n" } ] }
{ "deleted": [ { "char_start": 119, "char_end": 121, "chars": "{}" }, { "char_start": 123, "char_end": 131, "chars": ".format(" }, { "char_start": 144, "char_end": 145, "chars": ")" } ], "added": [ { "char_start": 119, "char_end": 120, "chars": "?" }, { "char_start": 122, "char_end": 124, "chars": ", " } ] }
github.com/jappe999/WebScraper/commit/46a4e0843aa44d903293637afad53dfcbc37b480
beta/database.py
cwe-089
showPoll
@hook.command(autohelp=False) def showPoll(pollID, db=None): """Shows the answers for a given poll.""" if not db_ready: db_init(db) if pollID == None: poll = db.execute("SELECT pollID, question FROM polls WHERE active = 1") if len(poll) == 0: reply("There's no poll open.") return else: poll = db.execute("SELECT pollID, question FROM polls WHERE pollID = '{}'".format(pollID)) if len(poll) == 0: reply("No such poll found.") return pollID = poll[0][0] question = poll[0][1] reply(question) for (index, answer, votes) in db.execute("SELECT 'index', answer, count(voteID) FROM answers LEFT JOIN votes ON votes.answerID = answers.answerID WHERE pollID = {} GROUP BY answers.answerID, 'index', answer ORDER BY 'index' ASC".format(pollID, )): reply("%s. %s (%s)" % (index, answer, votes))
@hook.command(autohelp=False) def showPoll(pollID, db=None): """Shows the answers for a given poll.""" if not db_ready: db_init(db) if pollID == None: poll = db.execute("SELECT pollID, question FROM polls WHERE active = 1") if len(poll) == 0: reply("There's no poll open.") return else: poll = db.execute("SELECT pollID, question FROM polls WHERE pollID = ?", (pollID,)) if len(poll) == 0: reply("No such poll found.") return pollID = poll[0][0] question = poll[0][1] reply(question) for (index, answer, votes) in db.execute("SELECT 'index', answer, count(voteID) FROM answers LEFT JOIN votes ON votes.answerID = answers.answerID WHERE pollID = ? GROUP BY answers.answerID, 'index', answer ORDER BY 'index' ASC", (pollID, )): reply("%s. %s (%s)" % (index, answer, votes))
{ "deleted": [ { "line_no": 11, "char_start": 343, "char_end": 442, "line": " poll = db.execute(\"SELECT pollID, question FROM polls WHERE pollID = '{}'\".format(pollID))\n" }, { "line_no": 18, "char_start": 599, "char_end": 851, "line": " for (index, answer, votes) in db.execute(\"SELECT 'index', answer, count(voteID) FROM answers LEFT JOIN votes ON votes.answerID = answers.answerID WHERE pollID = {} GROUP BY answers.answerID, 'index', answer ORDER BY 'index' ASC\".format(pollID, )):\n" } ], "added": [ { "line_no": 11, "char_start": 343, "char_end": 435, "line": " poll = db.execute(\"SELECT pollID, question FROM polls WHERE pollID = ?\", (pollID,))\n" }, { "line_no": 18, "char_start": 592, "char_end": 838, "line": " for (index, answer, votes) in db.execute(\"SELECT 'index', answer, count(voteID) FROM answers LEFT JOIN votes ON votes.answerID = answers.answerID WHERE pollID = ? GROUP BY answers.answerID, 'index', answer ORDER BY 'index' ASC\", (pollID, )):\n" } ] }
{ "deleted": [ { "char_start": 420, "char_end": 424, "chars": "'{}'" }, { "char_start": 425, "char_end": 432, "chars": ".format" }, { "char_start": 764, "char_end": 766, "chars": "{}" }, { "char_start": 831, "char_end": 838, "chars": ".format" } ], "added": [ { "char_start": 420, "char_end": 421, "chars": "?" }, { "char_start": 422, "char_end": 424, "chars": ", " }, { "char_start": 431, "char_end": 432, "chars": "," }, { "char_start": 757, "char_end": 758, "chars": "?" }, { "char_start": 823, "char_end": 825, "chars": ", " } ] }
github.com/FrozenPigs/Taigabot/commit/ea9b83a66ae1f0f38a1895f3e8dfa2833d77e3a6
plugins/poll.py
cwe-089
process_ranks
def process_ranks(self, scene, urls, recent_date): PLAYER1 = 0 PLAYER2 = 1 WINNER = 2 DATE = 3 SCENE = 4 # make sure if we already have calculated ranks for these players at this time, we do not do it again sql = "SELECT * FROM ranks WHERE scene = '{}' AND date='{}';".format(str(scene), recent_date) res = self.db.exec(sql) if len(res) > 0: LOG.info('We have already calculated ranks for {} on date {}. SKipping'.format(scene, recent_date)) return matches = bracket_utils.get_matches_from_urls(self.db, urls) LOG.info('About to start processing ranks for scene {} on {}'.format(scene, recent_date)) # Iterate through each match, and build up our dict win_loss_dict = {} for match in matches: p1 = match[PLAYER1] p2 = match[PLAYER2] winner = match[WINNER] date = match[DATE] #Add p1 to the dict if p1 not in win_loss_dict: win_loss_dict[p1] = {} if p2 not in win_loss_dict[p1]: win_loss_dict[p1][p2] = [] # Add an entry to represent this match to p1 win_loss_dict[p1][p2].append((date, winner == p1)) # add p2 to the dict if p2 not in win_loss_dict: win_loss_dict[p2] = {} if p1 not in win_loss_dict[p2]: win_loss_dict[p2][p1] = [] win_loss_dict[p2][p1].append((date, winner == p2)) ranks = get_ranks(win_loss_dict) tag_rank_map = {} for i, x in enumerate(ranks): points, player = x rank = len(ranks) - i sql = "INSERT INTO ranks (scene, player, rank, points, date) VALUES ('{}', '{}', '{}', '{}', '{}');"\ .format(str(scene), str(player), int(rank), str(points), str(recent_date)) self.db.exec(sql) # Only count this player if this is the scene he/she belongs to sql = "SELECT scene FROM players WHERE tag='{}';".format(player) res = self.db.exec(sql) if len(res) == 0 or res[0][0] == scene: # Also create a list to update the player web map = {'rank':rank, 'total_ranked':len(ranks)} tag_rank_map[player] = map player_web.update_ranks(tag_rank_map)
def process_ranks(self, scene, urls, recent_date): PLAYER1 = 0 PLAYER2 = 1 WINNER = 2 DATE = 3 SCENE = 4 # make sure if we already have calculated ranks for these players at this time, we do not do it again sql = "SELECT * FROM ranks WHERE scene = '{scene}' AND date='{date}';" args = {'scene': scene, 'date': recent_date} res = self.db.exec(sql, args) if len(res) > 0: LOG.info('We have already calculated ranks for {} on date {}. SKipping'.format(scene, recent_date)) return matches = bracket_utils.get_matches_from_urls(self.db, urls) LOG.info('About to start processing ranks for scene {} on {}'.format(scene, recent_date)) # Iterate through each match, and build up our dict win_loss_dict = {} for match in matches: p1 = match[PLAYER1] p2 = match[PLAYER2] winner = match[WINNER] date = match[DATE] #Add p1 to the dict if p1 not in win_loss_dict: win_loss_dict[p1] = {} if p2 not in win_loss_dict[p1]: win_loss_dict[p1][p2] = [] # Add an entry to represent this match to p1 win_loss_dict[p1][p2].append((date, winner == p1)) # add p2 to the dict if p2 not in win_loss_dict: win_loss_dict[p2] = {} if p1 not in win_loss_dict[p2]: win_loss_dict[p2][p1] = [] win_loss_dict[p2][p1].append((date, winner == p2)) ranks = get_ranks(win_loss_dict) tag_rank_map = {} for i, x in enumerate(ranks): points, player = x rank = len(ranks) - i sql = "INSERT INTO ranks (scene, player, rank, points, date) VALUES ('{scene}', '{player}', '{rank}', '{points}', '{recent_date}');" args = {'scene': scene, 'player': player, 'rank': rank, 'points': points, 'recent_date': recent_date} self.db.exec(sql, args) # Only count this player if this is the scene he/she belongs to sql = "SELECT scene FROM players WHERE tag='{player}';" args = {'player': player} res = self.db.exec(sql, args) if len(res) == 0 or res[0][0] == scene: # Also create a list to update the player web map = {'rank':rank, 'total_ranked':len(ranks)} tag_rank_map[player] = map player_web.update_ranks(tag_rank_map)
{ "deleted": [ { "line_no": 9, "char_start": 260, "char_end": 362, "line": " sql = \"SELECT * FROM ranks WHERE scene = '{}' AND date='{}';\".format(str(scene), recent_date)\n" }, { "line_no": 10, "char_start": 362, "char_end": 394, "line": " res = self.db.exec(sql)\n" }, { "line_no": 52, "char_start": 1725, "char_end": 1839, "line": " sql = \"INSERT INTO ranks (scene, player, rank, points, date) VALUES ('{}', '{}', '{}', '{}', '{}');\"\\\n" }, { "line_no": 53, "char_start": 1839, "char_end": 1934, "line": " .format(str(scene), str(player), int(rank), str(points), str(recent_date))\n" }, { "line_no": 54, "char_start": 1934, "char_end": 1964, "line": " self.db.exec(sql)\n" }, { "line_no": 57, "char_start": 2041, "char_end": 2118, "line": " sql = \"SELECT scene FROM players WHERE tag='{}';\".format(player)\n" }, { "line_no": 58, "char_start": 2118, "char_end": 2154, "line": " res = self.db.exec(sql)\n" } ], "added": [ { "line_no": 9, "char_start": 260, "char_end": 339, "line": " sql = \"SELECT * FROM ranks WHERE scene = '{scene}' AND date='{date}';\"\n" }, { "line_no": 10, "char_start": 339, "char_end": 392, "line": " args = {'scene': scene, 'date': recent_date}\n" }, { "line_no": 11, "char_start": 392, "char_end": 430, "line": " res = self.db.exec(sql, args)\n" }, { "line_no": 53, "char_start": 1761, "char_end": 1906, "line": " sql = \"INSERT INTO ranks (scene, player, rank, points, date) VALUES ('{scene}', '{player}', '{rank}', '{points}', '{recent_date}');\"\n" }, { "line_no": 54, "char_start": 1906, "char_end": 2020, "line": " args = {'scene': scene, 'player': player, 'rank': rank, 'points': points, 'recent_date': recent_date}\n" }, { "line_no": 55, "char_start": 2020, "char_end": 2056, "line": " self.db.exec(sql, args)\n" }, { "line_no": 58, "char_start": 2133, "char_end": 2201, "line": " sql = \"SELECT scene FROM players WHERE tag='{player}';\"\n" }, { "line_no": 59, "char_start": 2201, "char_end": 2239, "line": " args = {'player': player}\n" }, { "line_no": 60, "char_start": 2239, "char_end": 2281, "line": " res = self.db.exec(sql, args)\n" } ] }
{ "deleted": [ { "char_start": 329, "char_end": 334, "chars": ".form" }, { "char_start": 335, "char_end": 339, "chars": "t(st" }, { "char_start": 340, "char_end": 341, "chars": "(" }, { "char_start": 346, "char_end": 347, "chars": ")" }, { "char_start": 360, "char_end": 361, "chars": ")" }, { "char_start": 1837, "char_end": 1838, "chars": "\\" }, { "char_start": 1853, "char_end": 1857, "chars": " " }, { "char_start": 1858, "char_end": 1871, "chars": " .format(str(" }, { "char_start": 1876, "char_end": 1877, "chars": ")" }, { "char_start": 1879, "char_end": 1881, "chars": "st" }, { "char_start": 1882, "char_end": 1883, "chars": "(" }, { "char_start": 1889, "char_end": 1890, "chars": ")" }, { "char_start": 1892, "char_end": 1893, "chars": "i" }, { "char_start": 1894, "char_end": 1896, "chars": "t(" }, { "char_start": 1900, "char_end": 1901, "chars": ")" }, { "char_start": 1903, "char_end": 1904, "chars": "s" }, { "char_start": 1905, "char_end": 1907, "chars": "r(" }, { "char_start": 1913, "char_end": 1914, "chars": ")" }, { "char_start": 1916, "char_end": 1917, "chars": "s" }, { "char_start": 1918, "char_end": 1920, "chars": "r(" }, { "char_start": 1931, "char_end": 1933, "chars": "))" }, { "char_start": 2102, "char_end": 2105, "chars": ".fo" }, { "char_start": 2106, "char_end": 2107, "chars": "m" }, { "char_start": 2108, "char_end": 2110, "chars": "t(" }, { "char_start": 2116, "char_end": 2117, "chars": ")" } ], "added": [ { "char_start": 311, "char_end": 316, "chars": "scene" }, { "char_start": 330, "char_end": 334, "chars": "date" }, { "char_start": 338, "char_end": 348, "chars": "\n a" }, { "char_start": 349, "char_end": 356, "chars": "gs = {'" }, { "char_start": 357, "char_end": 364, "chars": "cene': " }, { "char_start": 371, "char_end": 379, "chars": "'date': " }, { "char_start": 390, "char_end": 391, "chars": "}" }, { "char_start": 422, "char_end": 428, "chars": ", args" }, { "char_start": 1844, "char_end": 1849, "chars": "scene" }, { "char_start": 1855, "char_end": 1861, "chars": "player" }, { "char_start": 1867, "char_end": 1871, "chars": "rank" }, { "char_start": 1877, "char_end": 1883, "chars": "points" }, { "char_start": 1889, "char_end": 1900, "chars": "recent_date" }, { "char_start": 1918, "char_end": 1922, "chars": "args" }, { "char_start": 1923, "char_end": 1924, "chars": "=" }, { "char_start": 1925, "char_end": 1927, "chars": "{'" }, { "char_start": 1928, "char_end": 1935, "chars": "cene': " }, { "char_start": 1942, "char_end": 1948, "chars": "'playe" }, { "char_start": 1949, "char_end": 1952, "chars": "': " }, { "char_start": 1960, "char_end": 1963, "chars": "'ra" }, { "char_start": 1964, "char_end": 1968, "chars": "k': " }, { "char_start": 1974, "char_end": 1980, "chars": "'point" }, { "char_start": 1981, "char_end": 1984, "chars": "': " }, { "char_start": 1992, "char_end": 1993, "chars": "'" }, { "char_start": 1994, "char_end": 2007, "chars": "ecent_date': " }, { "char_start": 2018, "char_end": 2019, "chars": "}" }, { "char_start": 2048, "char_end": 2054, "chars": ", args" }, { "char_start": 2190, "char_end": 2196, "chars": "player" }, { "char_start": 2200, "char_end": 2214, "chars": "\n a" }, { "char_start": 2215, "char_end": 2224, "chars": "gs = {'pl" }, { "char_start": 2225, "char_end": 2231, "chars": "yer': " }, { "char_start": 2237, "char_end": 2238, "chars": "}" }, { "char_start": 2273, "char_end": 2279, "chars": ", args" } ] }
github.com/DKelle/Smash_stats/commit/4bb83f3f6ce7d6bebbeb512cd015f9e72cf36d63
process_data.py
cwe-089
analyze_smashgg
def analyze_smashgg(self, urls, name): LOG.info('we are about to analyze scene {} with {} brackets'.format(name, len(urls))) for url in urls: # Before we process this URL, check to see if we already have sql = "SELECT * FROM analyzed where base_url='{}'".format(url) res = self.db.exec(sql) if len(res) == 0: display_name = bracket_utils.get_display_base(url) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue LOG.info('About to process pro bracket {}'.format(url)) self.data_processor.process(url, name, display_name) else: LOG.info("Skpping pro bracket because it has already been analyzed: {}".format(url))
def analyze_smashgg(self, urls, name): LOG.info('we are about to analyze scene {} with {} brackets'.format(name, len(urls))) for url in urls: # Before we process this URL, check to see if we already have sql = "SELECT * FROM analyzed where base_url='{url}'" args = {'url':url} res = self.db.exec(sql, args) if len(res) == 0: display_name = bracket_utils.get_display_base(url) # We don't care about doubles tournaments if 'doubles' in display_name.lower() or 'dubs' in display_name.lower(): LOG.info('We are skipping the tournament {} because it is a doubles tournament'.format(display_name)) continue LOG.info('About to process pro bracket {}'.format(url)) self.data_processor.process(url, name, display_name) else: LOG.info("Skpping pro bracket because it has already been analyzed: {}".format(url))
{ "deleted": [ { "line_no": 5, "char_start": 236, "char_end": 311, "line": " sql = \"SELECT * FROM analyzed where base_url='{}'\".format(url)\n" }, { "line_no": 6, "char_start": 311, "char_end": 347, "line": " res = self.db.exec(sql)\n" } ], "added": [ { "line_no": 5, "char_start": 236, "char_end": 302, "line": " sql = \"SELECT * FROM analyzed where base_url='{url}'\"\n" }, { "line_no": 6, "char_start": 302, "char_end": 333, "line": " args = {'url':url}\n" }, { "line_no": 7, "char_start": 333, "char_end": 375, "line": " res = self.db.exec(sql, args)\n" } ] }
{ "deleted": [ { "char_start": 298, "char_end": 303, "chars": ".form" }, { "char_start": 304, "char_end": 306, "chars": "t(" }, { "char_start": 309, "char_end": 310, "chars": ")" } ], "added": [ { "char_start": 295, "char_end": 298, "chars": "url" }, { "char_start": 301, "char_end": 324, "chars": "\n args = {'u" }, { "char_start": 325, "char_end": 328, "chars": "l':" }, { "char_start": 331, "char_end": 332, "chars": "}" }, { "char_start": 367, "char_end": 373, "chars": ", args" } ] }
github.com/DKelle/Smash_stats/commit/4bb83f3f6ce7d6bebbeb512cd015f9e72cf36d63
validURLs.py
cwe-089
test
@mod.route('/test', methods=['GET', 'POST']) def test(): user_id = session['logged_id'] sql = 'SELECT * FROM message where user_id = %d ORDER BY c_time DESC' \ % (user_id) cursor.execute(sql) m = cursor.fetchall() print(m)
@mod.route('/test', methods=['GET', 'POST']) def test(): user_id = session['logged_id'] cursor.execute('SELECT * FROM message where user_id = %s ORDER BY c_time DESC', (user_id,)) m = cursor.fetchall() print(m)
{ "deleted": [ { "line_no": 4, "char_start": 92, "char_end": 168, "line": " sql = 'SELECT * FROM message where user_id = %d ORDER BY c_time DESC' \\\n" }, { "line_no": 5, "char_start": 168, "char_end": 188, "line": " % (user_id)\n" }, { "line_no": 6, "char_start": 188, "char_end": 212, "line": " cursor.execute(sql)\n" } ], "added": [ { "line_no": 4, "char_start": 92, "char_end": 188, "line": " cursor.execute('SELECT * FROM message where user_id = %s ORDER BY c_time DESC', (user_id,))\n" } ] }
{ "deleted": [ { "char_start": 97, "char_end": 102, "chars": "ql = " }, { "char_start": 142, "char_end": 143, "chars": "d" }, { "char_start": 165, "char_end": 177, "chars": " \\\n %" }, { "char_start": 187, "char_end": 210, "chars": "\n cursor.execute(sql" } ], "added": [ { "char_start": 96, "char_end": 99, "chars": "cur" }, { "char_start": 100, "char_end": 111, "chars": "or.execute(" }, { "char_start": 151, "char_end": 152, "chars": "s" }, { "char_start": 174, "char_end": 175, "chars": "," }, { "char_start": 184, "char_end": 185, "chars": "," } ] }
github.com/ulyssetsd/bjtu-sql/commit/17d7b21864b72ba5666f15236474a93268b32ec9
flaskr/flaskr/views/message.py
cwe-089
get_last_active_users
@staticmethod def get_last_active_users(limit): """ Get from the database a tuple of users who have been recently using the bot :param limit: integer that specifies how much users to get :return: tuple of tuples with users info """ log.info('Evaluating last active users with date of ' 'last time when they used bot...') # From photo_queries_table2 we take chat_id of the last # active users and from 'users' table we take info about these # users by chat_id which is a foreign key query = ('SELECT p.chat_id, u.first_name, u.nickname, u.last_name, ' 'u.language ' 'FROM photo_queries_table2 p ' 'INNER JOIN users u ' 'ON p.chat_id = u.chat_id ' 'GROUP BY u.chat_id, u.first_name, u.nickname, u.last_name, ' 'u.language ' 'ORDER BY MAX(time)' f'DESC LIMIT {limit}') try: cursor = db.execute_query(query) except DatabaseConnectionError: log.error("Cannot get the last active users because of some " "problems with the database") raise last_active_users = cursor.fetchall() return last_active_users
@staticmethod def get_last_active_users(limit): """ Get from the database a tuple of users who have been recently using the bot :param limit: integer that specifies how much users to get :return: tuple of tuples with users info """ log.info('Evaluating last active users with date of ' 'last time when they used bot...') # From photo_queries_table2 we take chat_id of the last # active users and from 'users' table we take info about these # users by chat_id which is a foreign key query = ('SELECT p.chat_id, u.first_name, u.nickname, u.last_name, ' 'u.language ' 'FROM photo_queries_table2 p ' 'INNER JOIN users u ' 'ON p.chat_id = u.chat_id ' 'GROUP BY u.chat_id, u.first_name, u.nickname, u.last_name, ' 'u.language ' 'ORDER BY MAX(time)' f'DESC LIMIT %s') parameters = limit, try: cursor = db.execute_query(query, parameters) except DatabaseConnectionError: log.error("Cannot get the last active users because of some " "problems with the database") raise last_active_users = cursor.fetchall() return last_active_users
{ "deleted": [ { "line_no": 23, "char_start": 976, "char_end": 1016, "line": " f'DESC LIMIT {limit}')\n" }, { "line_no": 26, "char_start": 1030, "char_end": 1075, "line": " cursor = db.execute_query(query)\n" } ], "added": [ { "line_no": 23, "char_start": 976, "char_end": 1011, "line": " f'DESC LIMIT %s')\n" }, { "line_no": 24, "char_start": 1011, "char_end": 1012, "line": "\n" }, { "line_no": 25, "char_start": 1012, "char_end": 1040, "line": " parameters = limit,\n" }, { "line_no": 28, "char_start": 1054, "char_end": 1111, "line": " cursor = db.execute_query(query, parameters)\n" } ] }
{ "deleted": [ { "char_start": 1006, "char_end": 1007, "chars": "{" }, { "char_start": 1012, "char_end": 1015, "chars": "}')" } ], "added": [ { "char_start": 1006, "char_end": 1033, "chars": "%s')\n\n parameters = " }, { "char_start": 1038, "char_end": 1039, "chars": "," }, { "char_start": 1097, "char_end": 1109, "chars": ", parameters" } ] }
github.com/RandyRomero/photoGPSbot/commit/0e9f57f13e61863b3672f5730e27f149da00786a
photogpsbot/users.py
cwe-089
on_save
def on_save(self): connection = get_connection() cursor = connection.cursor() cursor.execute( f"insert into visitors (ip_address, user_agent, referrer, full_path, visit_time) values ('{self.ip_address}', '{self.user_agent}', '{self.referrer}', '{self.full_path}', '{self.visit_time}');") connection.commit() connection.close() return 0
def on_save(self): connection = get_connection() cursor = connection.cursor() cursor.execute( "insert into visitors (ip_address, user_agent, referrer, full_path, visit_time) values (%s, %s, %s, %s, %s);", (str(self.ip_address), str(self.user_agent), str(self.referrer), str(self.full_path), self.visit_time)) connection.commit() connection.close() return 0
{ "deleted": [ { "line_no": 5, "char_start": 122, "char_end": 328, "line": " f\"insert into visitors (ip_address, user_agent, referrer, full_path, visit_time) values ('{self.ip_address}', '{self.user_agent}', '{self.referrer}', '{self.full_path}', '{self.visit_time}');\")\n" } ], "added": [ { "line_no": 5, "char_start": 122, "char_end": 245, "line": " \"insert into visitors (ip_address, user_agent, referrer, full_path, visit_time) values (%s, %s, %s, %s, %s);\",\n" }, { "line_no": 6, "char_start": 245, "char_end": 361, "line": " (str(self.ip_address), str(self.user_agent), str(self.referrer), str(self.full_path), self.visit_time))\n" } ] }
{ "deleted": [ { "char_start": 134, "char_end": 135, "chars": "f" }, { "char_start": 223, "char_end": 225, "chars": "'{" }, { "char_start": 240, "char_end": 242, "chars": "}'" }, { "char_start": 244, "char_end": 246, "chars": "'{" }, { "char_start": 261, "char_end": 263, "chars": "}'" }, { "char_start": 265, "char_end": 267, "chars": "'{" }, { "char_start": 280, "char_end": 282, "chars": "}'" }, { "char_start": 284, "char_end": 286, "chars": "'{" }, { "char_start": 300, "char_end": 302, "chars": "}'" }, { "char_start": 304, "char_end": 306, "chars": "'{" }, { "char_start": 321, "char_end": 323, "chars": "}'" }, { "char_start": 324, "char_end": 326, "chars": ";\"" } ], "added": [ { "char_start": 222, "char_end": 262, "chars": "%s, %s, %s, %s, %s);\",\n (str(" }, { "char_start": 277, "char_end": 278, "chars": ")" }, { "char_start": 280, "char_end": 284, "chars": "str(" }, { "char_start": 299, "char_end": 300, "chars": ")" }, { "char_start": 302, "char_end": 306, "chars": "str(" }, { "char_start": 319, "char_end": 320, "chars": ")" }, { "char_start": 322, "char_end": 326, "chars": "str(" }, { "char_start": 340, "char_end": 341, "chars": ")" } ] }
github.com/onewyoming/onewyoming/commit/54fc7b076fda2de74eeb55e6b75b28e09ef231c2
experimental/python/buford/model/visitor.py
cwe-089
update_institutions
def update_institutions(conn, sqlite, k10plus, ai): """ Update the institution table. """ current_institutions = get_all_current_institutions(k10plus, ai) old_institutions = get_all_old_institutions(conn, sqlite) # Check if the institution table is allready filled and this is not the first checkup institution_table_is_filled = len(old_institutions) > 10 for old_institution in old_institutions: if institution_table_is_filled and old_institution not in current_institutions: message = "Die ISIL %s ist im aktuellen Import nicht mehr vorhanden.\nWenn dies beabsichtigt ist, bitte die Institution aus der Datenbank loeschen." % old_institution send_message(message) for current_institution in current_institutions: if current_institution == " " or '"' in current_institution: continue if current_institution not in old_institutions: message = "The institution %s is new in Solr." % current_institution if institution_table_is_filled: send_message(message) else: logging.info(message) sql = "INSERT INTO institution (institution) VALUES ('%s')" % current_institution sqlite.execute(sql) conn.commit()
def update_institutions(conn, sqlite, k10plus, ai): """ Update the institution table. """ current_institutions = get_all_current_institutions(k10plus, ai) old_institutions = get_all_old_institutions(conn, sqlite) # Check if the institution table is allready filled and this is not the first checkup institution_table_is_filled = len(old_institutions) > 10 for old_institution in old_institutions: if institution_table_is_filled and old_institution not in current_institutions: message = "Die ISIL %s ist im aktuellen Import nicht mehr vorhanden.\nWenn dies beabsichtigt ist, bitte die Institution aus der Datenbank loeschen." % old_institution send_message(message) for current_institution in current_institutions: if current_institution == " " or '"' in current_institution: continue if current_institution not in old_institutions: message = "The institution %s is new in Solr." % current_institution if institution_table_is_filled: send_message(message) else: logging.info(message) sql = "INSERT INTO institution (institution) VALUES (?)" sqlite.execute(sql, (current_institution,)) conn.commit()
{ "deleted": [ { "line_no": 25, "char_start": 1155, "char_end": 1249, "line": " sql = \"INSERT INTO institution (institution) VALUES ('%s')\" % current_institution\n" }, { "line_no": 26, "char_start": 1249, "char_end": 1281, "line": " sqlite.execute(sql)\n" } ], "added": [ { "line_no": 25, "char_start": 1155, "char_end": 1224, "line": " sql = \"INSERT INTO institution (institution) VALUES (?)\"\n" }, { "line_no": 26, "char_start": 1224, "char_end": 1280, "line": " sqlite.execute(sql, (current_institution,))\n" } ] }
{ "deleted": [ { "char_start": 1220, "char_end": 1224, "chars": "'%s'" }, { "char_start": 1226, "char_end": 1248, "chars": " % current_institution" } ], "added": [ { "char_start": 1220, "char_end": 1221, "chars": "?" }, { "char_start": 1254, "char_end": 1278, "chars": ", (current_institution,)" } ] }
github.com/miku/siskin/commit/7fa398d2fea72bf2e8b4808f75df4b3d35ae959a
bin/solrcheckup.py
cwe-089
_add_to_db
@staticmethod def _add_to_db(user): """ Adds User object to the database :param user: User object with info about user :return: None """ query = ("INSERT INTO users (chat_id, first_name, nickname, " "last_name, language) " f"VALUES ({user.chat_id}, '{user.first_name}', " f"'{user.nickname}', '{user.last_name}', '{user.language}')") try: db.add(query) except DatabaseError: log.error("Cannot add user to the database") else: log.info(f"User {user} was successfully added to the users db")
@staticmethod def _add_to_db(user): """ Adds User object to the database :param user: User object with info about user :return: None """ query = ("INSERT INTO users (chat_id, first_name, nickname, " "last_name, language) " f"VALUES (%s, %s, %s, %s, %s)") parameters = (user.chat_id, user.first_name, user.nickname, user.last_name, user.language) try: db.add(query, parameters) except DatabaseError: log.error("Cannot add user to the database") else: log.info(f"User {user} was successfully added to the users db")
{ "deleted": [ { "line_no": 10, "char_start": 296, "char_end": 362, "line": " f\"VALUES ({user.chat_id}, '{user.first_name}', \"\n" }, { "line_no": 11, "char_start": 362, "char_end": 441, "line": " f\"'{user.nickname}', '{user.last_name}', '{user.language}')\")\n" }, { "line_no": 13, "char_start": 454, "char_end": 480, "line": " db.add(query)\n" } ], "added": [ { "line_no": 10, "char_start": 296, "char_end": 345, "line": " f\"VALUES (%s, %s, %s, %s, %s)\")\n" }, { "line_no": 11, "char_start": 345, "char_end": 346, "line": "\n" }, { "line_no": 12, "char_start": 346, "char_end": 414, "line": " parameters = (user.chat_id, user.first_name, user.nickname,\n" }, { "line_no": 13, "char_start": 414, "char_end": 467, "line": " user.last_name, user.language)\n" }, { "line_no": 14, "char_start": 467, "char_end": 468, "line": "\n" }, { "line_no": 16, "char_start": 481, "char_end": 519, "line": " db.add(query, parameters)\n" } ] }
{ "deleted": [ { "char_start": 323, "char_end": 324, "chars": "{" }, { "char_start": 336, "char_end": 337, "chars": "}" }, { "char_start": 339, "char_end": 341, "chars": "'{" }, { "char_start": 356, "char_end": 358, "chars": "}'" }, { "char_start": 360, "char_end": 361, "chars": "\"" }, { "char_start": 379, "char_end": 399, "chars": "f\"'{user.nickname}'," }, { "char_start": 400, "char_end": 402, "chars": "'{" }, { "char_start": 416, "char_end": 418, "chars": "}'" }, { "char_start": 420, "char_end": 422, "chars": "'{" }, { "char_start": 435, "char_end": 437, "chars": "}'" }, { "char_start": 438, "char_end": 440, "chars": "\")" } ], "added": [ { "char_start": 323, "char_end": 368, "chars": "%s, %s, %s, %s, %s)\")\n\n parameters = (" }, { "char_start": 399, "char_end": 413, "chars": "user.nickname," }, { "char_start": 431, "char_end": 435, "chars": " " }, { "char_start": 466, "char_end": 467, "chars": "\n" }, { "char_start": 505, "char_end": 517, "chars": ", parameters" } ] }
github.com/RandyRomero/photoGPSbot/commit/0e9f57f13e61863b3672f5730e27f149da00786a
photogpsbot/users.py
cwe-089
deletePost
def deletePost(self,postid): sqlText="delete from post where post.postid=%d"%(postid) result=sql.deleteDB(self.conn,sqlText) return result;
def deletePost(self,postid): sqlText="delete from post where post.postid=%s" params=[postid] result=sql.deleteDB(self.conn,sqlText,params) return result;
{ "deleted": [ { "line_no": 2, "char_start": 33, "char_end": 98, "line": " sqlText=\"delete from post where post.postid=%d\"%(postid)\n" }, { "line_no": 3, "char_start": 98, "char_end": 145, "line": " result=sql.deleteDB(self.conn,sqlText)\n" } ], "added": [ { "line_no": 2, "char_start": 33, "char_end": 89, "line": " sqlText=\"delete from post where post.postid=%s\"\n" }, { "line_no": 3, "char_start": 89, "char_end": 113, "line": " params=[postid]\n" }, { "line_no": 4, "char_start": 113, "char_end": 167, "line": " result=sql.deleteDB(self.conn,sqlText,params)\n" } ] }
{ "deleted": [ { "char_start": 86, "char_end": 87, "chars": "d" }, { "char_start": 88, "char_end": 90, "chars": "%(" }, { "char_start": 96, "char_end": 97, "chars": ")" } ], "added": [ { "char_start": 86, "char_end": 87, "chars": "s" }, { "char_start": 88, "char_end": 105, "chars": "\n params=[" }, { "char_start": 111, "char_end": 112, "chars": "]" }, { "char_start": 158, "char_end": 165, "chars": ",params" } ] }
github.com/ShaominLi/Twitter_project/commit/5329d91f9e569c95184053c8e7ef596949c33ce9
modules/post.py
cwe-089
sloka
@app.route('/sloka') def sloka(): sloka_number = request.args.get('sloka_number') sloka_number_parts = sloka_number.split('.') sloka_number_previous = "%s.%s.%d" % (sloka_number_parts[0], sloka_number_parts[1], int(sloka_number_parts[2])-1) sloka_number_next = "%s.%s.%d" % (sloka_number_parts[0], sloka_number_parts[1], int(sloka_number_parts[2])+1) try: with sql.connect('amara.db') as con: con.row_factory = sql.Row cur = con.cursor() cur.execute("select * from mula where sloka_number = '%s' order by sloka_line;" % sloka_number) mula = cur.fetchall(); cur.execute("select * from pada where sloka_number = '%s' order by id;" % sloka_number) pada = cur.fetchall(); varga = "" if len(pada) > 0: varga = pada[0]["varga"] return render_template('sloka.html', mula=mula, pada=pada, varga=varga, sloka_number=sloka_number, sloka_number_previous=sloka_number_previous, sloka_number_next=sloka_number_next) finally: con.close()
@app.route('/sloka') def sloka(): sloka_number = request.args.get('sloka_number') sloka_number_parts = sloka_number.split('.') sloka_number_previous = "%s.%s.%d" % (sloka_number_parts[0], sloka_number_parts[1], int(sloka_number_parts[2])-1) sloka_number_next = "%s.%s.%d" % (sloka_number_parts[0], sloka_number_parts[1], int(sloka_number_parts[2])+1) try: with sql.connect('amara.db') as con: con.row_factory = sql.Row cur = con.cursor() cur.execute("select * from mula where sloka_number = ? order by sloka_line;", [sloka_number]) mula = cur.fetchall(); cur.execute("select * from pada where sloka_number = ? order by id;", [sloka_number]) pada = cur.fetchall(); varga = "" if len(pada) > 0: varga = pada[0]["varga"] return render_template('sloka.html', mula=mula, pada=pada, varga=varga, sloka_number=sloka_number, sloka_number_previous=sloka_number_previous, sloka_number_next=sloka_number_next) finally: con.close()
{ "deleted": [ { "line_no": 15, "char_start": 494, "char_end": 602, "line": " cur.execute(\"select * from mula where sloka_number = '%s' order by sloka_line;\" % sloka_number)\n" }, { "line_no": 18, "char_start": 638, "char_end": 738, "line": " cur.execute(\"select * from pada where sloka_number = '%s' order by id;\" % sloka_number)\n" } ], "added": [ { "line_no": 15, "char_start": 494, "char_end": 600, "line": " cur.execute(\"select * from mula where sloka_number = ? order by sloka_line;\", [sloka_number])\n" }, { "line_no": 18, "char_start": 636, "char_end": 734, "line": " cur.execute(\"select * from pada where sloka_number = ? order by id;\", [sloka_number])\n" } ] }
{ "deleted": [ { "char_start": 559, "char_end": 563, "chars": "'%s'" }, { "char_start": 585, "char_end": 587, "chars": " %" }, { "char_start": 703, "char_end": 707, "chars": "'%s'" }, { "char_start": 721, "char_end": 723, "chars": " %" } ], "added": [ { "char_start": 559, "char_end": 560, "chars": "?" }, { "char_start": 582, "char_end": 583, "chars": "," }, { "char_start": 584, "char_end": 585, "chars": "[" }, { "char_start": 597, "char_end": 598, "chars": "]" }, { "char_start": 701, "char_end": 702, "chars": "?" }, { "char_start": 716, "char_end": 717, "chars": "," }, { "char_start": 718, "char_end": 719, "chars": "[" }, { "char_start": 731, "char_end": 732, "chars": "]" } ] }
github.com/aupasana/amara-quiz/commit/6ceb5dc8ec38b4a3f1399e578ab970f7e3354922
docker/app.py
cwe-089
update_title
def update_title(self, title = None): if (not self.title): self.title = title # This will fall to a sql injection sql = "UPDATE jdk_entries SET title = '" + self.title + "'" + \ "WHERE jdk_entries.id = '" + self.entry_id + "';" db_execute(sql) self.update_date_modified() return None
def update_title(self, title = None): if (not self.title): self.title = title quote_tuple = self.title, self.entry_id # This will fall to a sql injection sql = "UPDATE jdk_entries SET title = ?" + \ "WHERE jdk_entries.id = ?;" db_execute(sql, quote_tuple) self.update_date_modified() return None
{ "deleted": [ { "line_no": 6, "char_start": 132, "char_end": 200, "line": " sql = \"UPDATE jdk_entries SET title = '\" + self.title + \"'\" + \\\n" }, { "line_no": 7, "char_start": 200, "char_end": 261, "line": " \"WHERE jdk_entries.id = '\" + self.entry_id + \"';\" \n" }, { "line_no": 9, "char_start": 262, "char_end": 282, "line": " db_execute(sql)\n" } ], "added": [ { "line_no": 5, "char_start": 91, "char_end": 135, "line": " quote_tuple = self.title, self.entry_id\n" }, { "line_no": 6, "char_start": 135, "char_end": 136, "line": "\n" }, { "line_no": 8, "char_start": 177, "char_end": 226, "line": " sql = \"UPDATE jdk_entries SET title = ?\" + \\\n" }, { "line_no": 9, "char_start": 226, "char_end": 265, "line": " \"WHERE jdk_entries.id = ?;\" \n" }, { "line_no": 11, "char_start": 266, "char_end": 299, "line": " db_execute(sql, quote_tuple)\n" } ] }
{ "deleted": [ { "char_start": 174, "char_end": 194, "chars": "'\" + self.title + \"'" }, { "char_start": 234, "char_end": 257, "chars": "'\" + self.entry_id + \"'" } ], "added": [ { "char_start": 95, "char_end": 140, "chars": "quote_tuple = self.title, self.entry_id\n\n " }, { "char_start": 219, "char_end": 220, "chars": "?" }, { "char_start": 260, "char_end": 261, "chars": "?" }, { "char_start": 284, "char_end": 297, "chars": ", quote_tuple" } ] }
github.com/peterlebrun/jdk/commit/000238566fbe55ba09676c3d57af04ae207235ae
entry.py
cwe-089
add_language
def add_language(self, language): """"Add new language for item translations.""" if self.connection: self.cursor.execute('insert into itemlanguage (language) values ("%s")' % language[0]) self.connection.commit()
def add_language(self, language): """"Add new language for item translations.""" if self.connection: t = (language[0], ) self.cursor.execute('insert into itemlanguage (language) values (?)', t) self.connection.commit()
{ "deleted": [ { "line_no": 4, "char_start": 121, "char_end": 220, "line": " self.cursor.execute('insert into itemlanguage (language) values (\"%s\")' % language[0])\n" } ], "added": [ { "line_no": 4, "char_start": 121, "char_end": 153, "line": " t = (language[0], )\n" }, { "line_no": 5, "char_start": 153, "char_end": 238, "line": " self.cursor.execute('insert into itemlanguage (language) values (?)', t)\n" } ] }
{ "deleted": [ { "char_start": 198, "char_end": 202, "chars": "\"%s\"" }, { "char_start": 204, "char_end": 206, "chars": " %" }, { "char_start": 207, "char_end": 218, "chars": "language[0]" } ], "added": [ { "char_start": 133, "char_end": 165, "chars": "t = (language[0], )\n " }, { "char_start": 230, "char_end": 231, "chars": "?" }, { "char_start": 233, "char_end": 234, "chars": "," }, { "char_start": 235, "char_end": 236, "chars": "t" } ] }
github.com/ecosl-developers/ecosl/commit/8af050a513338bf68ff2a243e4a2482d24e9aa3a
ecosldb/ecosldb.py
cwe-089
AdaptiveThresholdImage
MagickExport Image *AdaptiveThresholdImage(const Image *image, const size_t width,const size_t height,const ssize_t offset, ExceptionInfo *exception) { #define ThresholdImageTag "Threshold/Image" CacheView *image_view, *threshold_view; Image *threshold_image; MagickBooleanType status; MagickOffsetType progress; MagickPixelPacket zero; MagickRealType number_pixels; ssize_t y; assert(image != (const Image *) NULL); assert(image->signature == MagickCoreSignature); if (image->debug != MagickFalse) (void) LogMagickEvent(TraceEvent,GetMagickModule(),"%s",image->filename); assert(exception != (ExceptionInfo *) NULL); assert(exception->signature == MagickCoreSignature); threshold_image=CloneImage(image,0,0,MagickTrue,exception); if (threshold_image == (Image *) NULL) return((Image *) NULL); if (SetImageStorageClass(threshold_image,DirectClass) == MagickFalse) { InheritException(exception,&threshold_image->exception); threshold_image=DestroyImage(threshold_image); return((Image *) NULL); } /* Local adaptive threshold. */ status=MagickTrue; progress=0; GetMagickPixelPacket(image,&zero); number_pixels=(MagickRealType) (width*height); image_view=AcquireVirtualCacheView(image,exception); threshold_view=AcquireAuthenticCacheView(threshold_image,exception); #if defined(MAGICKCORE_OPENMP_SUPPORT) #pragma omp parallel for schedule(static) shared(progress,status) \ magick_number_threads(image,threshold_image,image->rows,1) #endif for (y=0; y < (ssize_t) image->rows; y++) { MagickBooleanType sync; MagickPixelPacket channel_bias, channel_sum; register const IndexPacket *magick_restrict indexes; register const PixelPacket *magick_restrict p, *magick_restrict r; register IndexPacket *magick_restrict threshold_indexes; register PixelPacket *magick_restrict q; register ssize_t x; ssize_t u, v; if (status == MagickFalse) continue; p=GetCacheViewVirtualPixels(image_view,-((ssize_t) width/2L),y-(ssize_t) height/2L,image->columns+width,height,exception); q=GetCacheViewAuthenticPixels(threshold_view,0,y,threshold_image->columns,1, exception); if ((p == (const PixelPacket *) NULL) || (q == (PixelPacket *) NULL)) { status=MagickFalse; continue; } indexes=GetCacheViewVirtualIndexQueue(image_view); threshold_indexes=GetCacheViewAuthenticIndexQueue(threshold_view); channel_bias=zero; channel_sum=zero; r=p; for (v=0; v < (ssize_t) height; v++) { for (u=0; u < (ssize_t) width; u++) { if (u == (ssize_t) (width-1)) { channel_bias.red+=r[u].red; channel_bias.green+=r[u].green; channel_bias.blue+=r[u].blue; channel_bias.opacity+=r[u].opacity; if (image->colorspace == CMYKColorspace) channel_bias.index=(MagickRealType) GetPixelIndex(indexes+(r-p)+u); } channel_sum.red+=r[u].red; channel_sum.green+=r[u].green; channel_sum.blue+=r[u].blue; channel_sum.opacity+=r[u].opacity; if (image->colorspace == CMYKColorspace) channel_sum.index=(MagickRealType) GetPixelIndex(indexes+(r-p)+u); } r+=image->columns+width; } for (x=0; x < (ssize_t) image->columns; x++) { MagickPixelPacket mean; mean=zero; r=p; channel_sum.red-=channel_bias.red; channel_sum.green-=channel_bias.green; channel_sum.blue-=channel_bias.blue; channel_sum.opacity-=channel_bias.opacity; channel_sum.index-=channel_bias.index; channel_bias=zero; for (v=0; v < (ssize_t) height; v++) { channel_bias.red+=r[0].red; channel_bias.green+=r[0].green; channel_bias.blue+=r[0].blue; channel_bias.opacity+=r[0].opacity; if (image->colorspace == CMYKColorspace) channel_bias.index=(MagickRealType) GetPixelIndex(indexes+x+(r-p)+0); channel_sum.red+=r[width-1].red;
channel_sum.green+=r[width-1].green; channel_sum.blue+=r[width-1].blue; channel_sum.opacity+=r[width-1].opacity; if (image->colorspace == CMYKColorspace) channel_sum.index=(MagickRealType) GetPixelIndex(indexes+x+(r-p)+ width-1); r+=image->columns+width; } mean.red=(MagickRealType) (channel_sum.red/number_pixels+offset); mean.green=(MagickRealType) (channel_sum.green/number_pixels+offset); mean.blue=(MagickRealType) (channel_sum.blue/number_pixels+offset); mean.opacity=(MagickRealType) (channel_sum.opacity/number_pixels+offset); if (image->colorspace == CMYKColorspace) mean.index=(MagickRealType) (channel_sum.index/number_pixels+offset); SetPixelRed(q,((MagickRealType) GetPixelRed(q) <= mean.red) ? 0 : QuantumRange); SetPixelGreen(q,((MagickRealType) GetPixelGreen(q) <= mean.green) ? 0 : QuantumRange); SetPixelBlue(q,((MagickRealType) GetPixelBlue(q) <= mean.blue) ? 0 : QuantumRange); SetPixelOpacity(q,((MagickRealType) GetPixelOpacity(q) <= mean.opacity) ? 0 : QuantumRange); if (image->colorspace == CMYKColorspace) SetPixelIndex(threshold_indexes+x,(((MagickRealType) GetPixelIndex( threshold_indexes+x) <= mean.index) ? 0 : QuantumRange)); p++; q++; } sync=SyncCacheViewAuthenticPixels(threshold_view,exception); if (sync == MagickFalse) status=MagickFalse; if (image->progress_monitor != (MagickProgressMonitor) NULL) { MagickBooleanType proceed; #if defined(MAGICKCORE_OPENMP_SUPPORT) #pragma omp atomic #endif progress++; proceed=SetImageProgress(image,ThresholdImageTag,progress,image->rows); if (proceed == MagickFalse) status=MagickFalse; } } threshold_view=DestroyCacheView(threshold_view); image_view=DestroyCacheView(image_view); if (status == MagickFalse) threshold_image=DestroyImage(threshold_image); return(threshold_image); }
MagickExport Image *AdaptiveThresholdImage(const Image *image, const size_t width,const size_t height,const ssize_t offset, ExceptionInfo *exception) { #define ThresholdImageTag "Threshold/Image" CacheView *image_view, *threshold_view; Image *threshold_image; MagickBooleanType status; MagickOffsetType progress; MagickPixelPacket zero; MagickRealType number_pixels; ssize_t y; assert(image != (const Image *) NULL); assert(image->signature == MagickCoreSignature); if (image->debug != MagickFalse) (void) LogMagickEvent(TraceEvent,GetMagickModule(),"%s",image->filename); assert(exception != (ExceptionInfo *) NULL); assert(exception->signature == MagickCoreSignature); threshold_image=CloneImage(image,0,0,MagickTrue,exception); if (threshold_image == (Image *) NULL) return((Image *) NULL); if (width == 0) return(threshold_image); if (SetImageStorageClass(threshold_image,DirectClass) == MagickFalse) { InheritException(exception,&threshold_image->exception); threshold_image=DestroyImage(threshold_image); return((Image *) NULL); } /* Local adaptive threshold. */ status=MagickTrue; progress=0; GetMagickPixelPacket(image,&zero); number_pixels=(MagickRealType) (width*height); image_view=AcquireVirtualCacheView(image,exception); threshold_view=AcquireAuthenticCacheView(threshold_image,exception); #if defined(MAGICKCORE_OPENMP_SUPPORT) #pragma omp parallel for schedule(static) shared(progress,status) \ magick_number_threads(image,threshold_image,image->rows,1) #endif for (y=0; y < (ssize_t) image->rows; y++) { MagickBooleanType sync; MagickPixelPacket channel_bias, channel_sum; register const IndexPacket *magick_restrict indexes; register const PixelPacket *magick_restrict p, *magick_restrict r; register IndexPacket *magick_restrict threshold_indexes; register PixelPacket *magick_restrict q; register ssize_t x; ssize_t u, v; if (status == MagickFalse) continue; p=GetCacheViewVirtualPixels(image_view,-((ssize_t) width/2L),y-(ssize_t) height/2L,image->columns+width,height,exception); q=GetCacheViewAuthenticPixels(threshold_view,0,y,threshold_image->columns,1, exception); if ((p == (const PixelPacket *) NULL) || (q == (PixelPacket *) NULL)) { status=MagickFalse; continue; } indexes=GetCacheViewVirtualIndexQueue(image_view); threshold_indexes=GetCacheViewAuthenticIndexQueue(threshold_view); channel_bias=zero; channel_sum=zero; r=p; for (v=0; v < (ssize_t) height; v++) { for (u=0; u < (ssize_t) width; u++) { if (u == (ssize_t) (width-1)) { channel_bias.red+=r[u].red; channel_bias.green+=r[u].green; channel_bias.blue+=r[u].blue; channel_bias.opacity+=r[u].opacity; if (image->colorspace == CMYKColorspace) channel_bias.index=(MagickRealType) GetPixelIndex(indexes+(r-p)+u); } channel_sum.red+=r[u].red; channel_sum.green+=r[u].green; channel_sum.blue+=r[u].blue; channel_sum.opacity+=r[u].opacity; if (image->colorspace == CMYKColorspace) channel_sum.index=(MagickRealType) GetPixelIndex(indexes+(r-p)+u); } r+=image->columns+width; } for (x=0; x < (ssize_t) image->columns; x++) { MagickPixelPacket mean; mean=zero; r=p; channel_sum.red-=channel_bias.red; channel_sum.green-=channel_bias.green; channel_sum.blue-=channel_bias.blue; channel_sum.opacity-=channel_bias.opacity; channel_sum.index-=channel_bias.index; channel_bias=zero; for (v=0; v < (ssize_t) height; v++) { channel_bias.red+=r[0].red; channel_bias.green+=r[0].green; channel_bias.blue+=r[0].blue; channel_bias.opacity+=r[0].opacity; if (image->colorspace == CMYKColorspace) channel_bias.index=(MagickRealType) GetPixelIndex(indexes+x+(r-p)+0);
channel_sum.red+=r[width-1].red; channel_sum.green+=r[width-1].green; channel_sum.blue+=r[width-1].blue; channel_sum.opacity+=r[width-1].opacity; if (image->colorspace == CMYKColorspace) channel_sum.index=(MagickRealType) GetPixelIndex(indexes+x+(r-p)+ width-1); r+=image->columns+width; } mean.red=(MagickRealType) (channel_sum.red/number_pixels+offset); mean.green=(MagickRealType) (channel_sum.green/number_pixels+offset); mean.blue=(MagickRealType) (channel_sum.blue/number_pixels+offset); mean.opacity=(MagickRealType) (channel_sum.opacity/number_pixels+offset); if (image->colorspace == CMYKColorspace) mean.index=(MagickRealType) (channel_sum.index/number_pixels+offset); SetPixelRed(q,((MagickRealType) GetPixelRed(q) <= mean.red) ? 0 : QuantumRange); SetPixelGreen(q,((MagickRealType) GetPixelGreen(q) <= mean.green) ? 0 : QuantumRange); SetPixelBlue(q,((MagickRealType) GetPixelBlue(q) <= mean.blue) ? 0 : QuantumRange); SetPixelOpacity(q,((MagickRealType) GetPixelOpacity(q) <= mean.opacity) ? 0 : QuantumRange); if (image->colorspace == CMYKColorspace) SetPixelIndex(threshold_indexes+x,(((MagickRealType) GetPixelIndex( threshold_indexes+x) <= mean.index) ? 0 : QuantumRange)); p++; q++; } sync=SyncCacheViewAuthenticPixels(threshold_view,exception); if (sync == MagickFalse) status=MagickFalse; if (image->progress_monitor != (MagickProgressMonitor) NULL) { MagickBooleanType proceed; #if defined(MAGICKCORE_OPENMP_SUPPORT) #pragma omp atomic #endif progress++; proceed=SetImageProgress(image,ThresholdImageTag,progress,image->rows); if (proceed == MagickFalse) status=MagickFalse; } } threshold_view=DestroyCacheView(threshold_view); image_view=DestroyCacheView(image_view); if (status == MagickFalse) threshold_image=DestroyImage(threshold_image); return(threshold_image); }
{ "deleted": [], "added": [ { "line_no": 38, "char_start": 875, "char_end": 893, "line": " if (width == 0)\n" }, { "line_no": 39, "char_start": 893, "char_end": 922, "line": " return(threshold_image);\n" } ] }
{ "deleted": [], "added": [ { "char_start": 881, "char_end": 928, "chars": "width == 0)\n return(threshold_image);\n if (" } ] }
github.com/ImageMagick/ImageMagick6/commit/55e6dc49f1a381d9d511ee2f888fdc3e3c3e3953
magick/threshold.c
cwe-125
SyncExifProfile
MagickBooleanType SyncExifProfile(Image *image,StringInfo *profile) { #define MaxDirectoryStack 16 #define EXIF_DELIMITER "\n" #define EXIF_NUM_FORMATS 12 #define TAG_EXIF_OFFSET 0x8769 #define TAG_INTEROP_OFFSET 0xa005 typedef struct _DirectoryInfo { unsigned char *directory; size_t entry; } DirectoryInfo; DirectoryInfo directory_stack[MaxDirectoryStack]; EndianType endian; size_t entry, length, number_entries; ssize_t id, level, offset; static int format_bytes[] = {0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8}; unsigned char *directory, *exif; /* Set EXIF resolution tag. */ length=GetStringInfoLength(profile); exif=GetStringInfoDatum(profile); if (length < 16) return(MagickFalse); id=(ssize_t) ReadProfileShort(LSBEndian,exif); if ((id != 0x4949) && (id != 0x4D4D)) { while (length != 0) { if (ReadProfileByte(&exif,&length) != 0x45) continue; if (ReadProfileByte(&exif,&length) != 0x78) continue; if (ReadProfileByte(&exif,&length) != 0x69) continue; if (ReadProfileByte(&exif,&length) != 0x66) continue; if (ReadProfileByte(&exif,&length) != 0x00) continue; if (ReadProfileByte(&exif,&length) != 0x00) continue; break; } if (length < 16) return(MagickFalse); id=(ssize_t) ReadProfileShort(LSBEndian,exif); } endian=LSBEndian; if (id == 0x4949) endian=LSBEndian; else if (id == 0x4D4D) endian=MSBEndian; else return(MagickFalse); if (ReadProfileShort(endian,exif+2) != 0x002a) return(MagickFalse); /* This the offset to the first IFD. */ offset=(ssize_t) ReadProfileLong(endian,exif+4); if ((offset < 0) || (size_t) offset >= length) return(MagickFalse); directory=exif+offset; level=0; entry=0; do { if (level > 0) { level--; directory=directory_stack[level].directory; entry=directory_stack[level].entry; } if ((directory < exif) || (directory > (exif+length-2))) break; /* Determine how many entries there are in the current IFD. */ number_entries=ReadProfileShort(endian,directory); for ( ; entry < number_entries; entry++) { int components; register unsigned char *p, *q; size_t number_bytes; ssize_t format, tag_value; q=(unsigned char *) (directory+2+(12*entry)); if (q > (exif+length-12)) break; /* corrupt EXIF */ tag_value=(ssize_t) ReadProfileShort(endian,q); format=(ssize_t) ReadProfileShort(endian,q+2); if ((format-1) >= EXIF_NUM_FORMATS) break; components=(ssize_t) ReadProfileLong(endian,q+4); if (components < 0) break; /* corrupt EXIF */ number_bytes=(size_t) components*format_bytes[format]; if ((ssize_t) number_bytes < components) break; /* prevent overflow */ if (number_bytes <= 4) p=q+8; else { /* The directory entry contains an offset.
*/ offset=(ssize_t) ReadProfileLong(endian,q+8); if ((size_t) (offset+number_bytes) > length) continue; if (~length < number_bytes) continue; /* prevent overflow */ p=(unsigned char *) (exif+offset); } switch (tag_value) { case 0x011a: { (void) WriteProfileLong(endian,(size_t) (image->resolution.x+0.5),p); (void) WriteProfileLong(endian,1UL,p+4); break; } case 0x011b: { (void) WriteProfileLong(endian,(size_t) (image->resolution.y+0.5),p); (void) WriteProfileLong(endian,1UL,p+4); break; } case 0x0112: { if (number_bytes == 4) { (void) WriteProfileLong(endian,(size_t) image->orientation,p); break; } (void) WriteProfileShort(endian,(unsigned short) image->orientation, p); break; } case 0x0128: { if (number_bytes == 4) { (void) WriteProfileLong(endian,(size_t) (image->units+1),p); break; } (void) WriteProfileShort(endian,(unsigned short) (image->units+1),p); break; } default: break; } if ((tag_value == TAG_EXIF_OFFSET) || (tag_value == TAG_INTEROP_OFFSET)) { offset=(ssize_t) ReadProfileLong(endian,p); if (((size_t) offset < length) && (level < (MaxDirectoryStack-2))) { directory_stack[level].directory=directory; entry++; directory_stack[level].entry=entry; level++; directory_stack[level].directory=exif+offset; directory_stack[level].entry=0; level++; if ((directory+2+(12*number_entries)) > (exif+length)) break; offset=(ssize_t) ReadProfileLong(endian,directory+2+(12* number_entries)); if ((offset != 0) && ((size_t) offset < length) && (level < (MaxDirectoryStack-2))) { directory_stack[level].directory=exif+offset; directory_stack[level].entry=0; level++; } } break; } } } while (level > 0); return(MagickTrue); }
MagickBooleanType SyncExifProfile(Image *image,StringInfo *profile) { #define MaxDirectoryStack 16 #define EXIF_DELIMITER "\n" #define EXIF_NUM_FORMATS 12 #define TAG_EXIF_OFFSET 0x8769 #define TAG_INTEROP_OFFSET 0xa005 typedef struct _DirectoryInfo { unsigned char *directory; size_t entry; } DirectoryInfo; DirectoryInfo directory_stack[MaxDirectoryStack]; EndianType endian; size_t entry, length, number_entries; ssize_t id, level, offset; static int format_bytes[] = {0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8}; unsigned char *directory, *exif; /* Set EXIF resolution tag. */ length=GetStringInfoLength(profile); exif=GetStringInfoDatum(profile); if (length < 16) return(MagickFalse); id=(ssize_t) ReadProfileShort(LSBEndian,exif); if ((id != 0x4949) && (id != 0x4D4D)) { while (length != 0) { if (ReadProfileByte(&exif,&length) != 0x45) continue; if (ReadProfileByte(&exif,&length) != 0x78) continue; if (ReadProfileByte(&exif,&length) != 0x69) continue; if (ReadProfileByte(&exif,&length) != 0x66) continue; if (ReadProfileByte(&exif,&length) != 0x00) continue; if (ReadProfileByte(&exif,&length) != 0x00) continue; break; } if (length < 16) return(MagickFalse); id=(ssize_t) ReadProfileShort(LSBEndian,exif); } endian=LSBEndian; if (id == 0x4949) endian=LSBEndian; else if (id == 0x4D4D) endian=MSBEndian; else return(MagickFalse); if (ReadProfileShort(endian,exif+2) != 0x002a) return(MagickFalse); /* This the offset to the first IFD. */ offset=(ssize_t) ReadProfileLong(endian,exif+4); if ((offset < 0) || (size_t) offset >= length) return(MagickFalse); directory=exif+offset; level=0; entry=0; do { if (level > 0) { level--; directory=directory_stack[level].directory; entry=directory_stack[level].entry; } if ((directory < exif) || (directory > (exif+length-2))) break; /* Determine how many entries there are in the current IFD. */ number_entries=ReadProfileShort(endian,directory); for ( ; entry < number_entries; entry++) { int components; register unsigned char *p, *q; size_t number_bytes; ssize_t format, tag_value; q=(unsigned char *) (directory+2+(12*entry)); if (q > (exif+length-12)) break; /* corrupt EXIF */ tag_value=(ssize_t) ReadProfileShort(endian,q); format=(ssize_t) ReadProfileShort(endian,q+2); if ((format < 0) || ((format-1) >= EXIF_NUM_FORMATS)) break; components=(ssize_t) ReadProfileLong(endian,q+4); if (components < 0) break; /* corrupt EXIF */ number_bytes=(size_t) components*format_bytes[format]; if ((ssize_t) number_bytes < components) break; /* prevent overflow */ if (number_bytes <= 4) p=q+8; else { /* The directory entry contains an offset.
*/ offset=(ssize_t) ReadProfileLong(endian,q+8); if ((size_t) (offset+number_bytes) > length) continue; if (~length < number_bytes) continue; /* prevent overflow */ p=(unsigned char *) (exif+offset); } switch (tag_value) { case 0x011a: { (void) WriteProfileLong(endian,(size_t) (image->resolution.x+0.5),p); (void) WriteProfileLong(endian,1UL,p+4); break; } case 0x011b: { (void) WriteProfileLong(endian,(size_t) (image->resolution.y+0.5),p); (void) WriteProfileLong(endian,1UL,p+4); break; } case 0x0112: { if (number_bytes == 4) { (void) WriteProfileLong(endian,(size_t) image->orientation,p); break; } (void) WriteProfileShort(endian,(unsigned short) image->orientation, p); break; } case 0x0128: { if (number_bytes == 4) { (void) WriteProfileLong(endian,(size_t) (image->units+1),p); break; } (void) WriteProfileShort(endian,(unsigned short) (image->units+1),p); break; } default: break; } if ((tag_value == TAG_EXIF_OFFSET) || (tag_value == TAG_INTEROP_OFFSET)) { offset=(ssize_t) ReadProfileLong(endian,p); if (((size_t) offset < length) && (level < (MaxDirectoryStack-2))) { directory_stack[level].directory=directory; entry++; directory_stack[level].entry=entry; level++; directory_stack[level].directory=exif+offset; directory_stack[level].entry=0; level++; if ((directory+2+(12*number_entries)) > (exif+length)) break; offset=(ssize_t) ReadProfileLong(endian,directory+2+(12* number_entries)); if ((offset != 0) && ((size_t) offset < length) && (level < (MaxDirectoryStack-2))) { directory_stack[level].directory=exif+offset; directory_stack[level].entry=0; level++; } } break; } } } while (level > 0); return(MagickTrue); }
{ "deleted": [ { "line_no": 125, "char_start": 2750, "char_end": 2792, "line": " if ((format-1) >= EXIF_NUM_FORMATS)\n" } ], "added": [ { "line_no": 125, "char_start": 2750, "char_end": 2810, "line": " if ((format < 0) || ((format-1) >= EXIF_NUM_FORMATS))\n" } ] }
{ "deleted": [], "added": [ { "char_start": 2767, "char_end": 2784, "chars": " < 0) || ((format" }, { "char_start": 2807, "char_end": 2808, "chars": ")" } ] }
github.com/ImageMagick/ImageMagick/commit/a7bb158b7bedd1449a34432feb3a67c8f1873bfa
MagickCore/profile.c
cwe-125
ng_pkt
static int ng_pkt(git_pkt **out, const char *line, size_t len) { git_pkt_ng *pkt; const char *ptr; size_t alloclen; pkt = git__malloc(sizeof(*pkt)); GITERR_CHECK_ALLOC(pkt); pkt->ref = NULL; pkt->type = GIT_PKT_NG; line += 3; /* skip "ng " */ if (!(ptr = strchr(line, ' '))) goto out_err; len = ptr - line; GITERR_CHECK_ALLOC_ADD(&alloclen, len, 1); pkt->ref = git__malloc(alloclen); GITERR_CHECK_ALLOC(pkt->ref); memcpy(pkt->ref, line, len); pkt->ref[len] = '\0'; line = ptr + 1; if (!(ptr = strchr(line, '\n'))) goto out_err; len = ptr - line; GITERR_CHECK_ALLOC_ADD(&alloclen, len, 1); pkt->msg = git__malloc(alloclen); GITERR_CHECK_ALLOC(pkt->msg); memcpy(pkt->msg, line, len); pkt->msg[len] = '\0'; *out = (git_pkt *)pkt; return 0; out_err: giterr_set(GITERR_NET, "invalid packet line"); git__free(pkt->ref); git__free(pkt); return -1; }
static int ng_pkt(git_pkt **out, const char *line, size_t len) { git_pkt_ng *pkt; const char *ptr; size_t alloclen; pkt = git__malloc(sizeof(*pkt)); GITERR_CHECK_ALLOC(pkt); pkt->ref = NULL; pkt->type = GIT_PKT_NG; if (len < 3) goto out_err; line += 3; /* skip "ng " */ len -= 3; if (!(ptr = memchr(line, ' ', len))) goto out_err; len = ptr - line; GITERR_CHECK_ALLOC_ADD(&alloclen, len, 1); pkt->ref = git__malloc(alloclen); GITERR_CHECK_ALLOC(pkt->ref); memcpy(pkt->ref, line, len); pkt->ref[len] = '\0'; if (len < 1) goto out_err; line = ptr + 1; len -= 1; if (!(ptr = memchr(line, '\n', len))) goto out_err; len = ptr - line; GITERR_CHECK_ALLOC_ADD(&alloclen, len, 1); pkt->msg = git__malloc(alloclen); GITERR_CHECK_ALLOC(pkt->msg); memcpy(pkt->msg, line, len); pkt->msg[len] = '\0'; *out = (git_pkt *)pkt; return 0; out_err: giterr_set(GITERR_NET, "invalid packet line"); git__free(pkt->ref); git__free(pkt); return -1; }
{ "deleted": [ { "line_no": 14, "char_start": 254, "char_end": 287, "line": "\tif (!(ptr = strchr(line, ' ')))\n" }, { "line_no": 26, "char_start": 505, "char_end": 539, "line": "\tif (!(ptr = strchr(line, '\\n')))\n" } ], "added": [ { "line_no": 13, "char_start": 225, "char_end": 239, "line": "\tif (len < 3)\n" }, { "line_no": 14, "char_start": 239, "char_end": 255, "line": "\t\tgoto out_err;\n" }, { "line_no": 16, "char_start": 284, "char_end": 295, "line": "\tlen -= 3;\n" }, { "line_no": 17, "char_start": 295, "char_end": 333, "line": "\tif (!(ptr = memchr(line, ' ', len)))\n" }, { "line_no": 28, "char_start": 534, "char_end": 548, "line": "\tif (len < 1)\n" }, { "line_no": 29, "char_start": 548, "char_end": 564, "line": "\t\tgoto out_err;\n" }, { "line_no": 31, "char_start": 581, "char_end": 592, "line": "\tlen -= 1;\n" }, { "line_no": 32, "char_start": 592, "char_end": 631, "line": "\tif (!(ptr = memchr(line, '\\n', len)))\n" } ] }
{ "deleted": [ { "char_start": 267, "char_end": 270, "chars": "str" }, { "char_start": 518, "char_end": 521, "chars": "str" } ], "added": [ { "char_start": 226, "char_end": 256, "chars": "if (len < 3)\n\t\tgoto out_err;\n\t" }, { "char_start": 285, "char_end": 296, "chars": "len -= 3;\n\t" }, { "char_start": 308, "char_end": 311, "chars": "mem" }, { "char_start": 324, "char_end": 329, "chars": ", len" }, { "char_start": 535, "char_end": 565, "chars": "if (len < 1)\n\t\tgoto out_err;\n\t" }, { "char_start": 582, "char_end": 593, "chars": "len -= 1;\n\t" }, { "char_start": 605, "char_end": 608, "chars": "mem" }, { "char_start": 622, "char_end": 627, "chars": ", len" } ] }
github.com/libgit2/libgit2/commit/1f9a8510e1d2f20ed7334eeeddb92c4dd8e7c649
src/transports/smart_pkt.c
cwe-125
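The ng_pkt fix above swaps unbounded strchr calls for memchr bounded by the remaining length, and rejects inputs that are too short before skipping the fixed "ng " prefix. A minimal sketch of that pattern, with hypothetical names and assuming the input buffer is not NUL-terminated:

#include <stddef.h>
#include <string.h>

static int parse_token(const char *line, size_t len,
                       const char **tok, size_t *tok_len)
{
    const char *ptr;

    if (len < 3)                  /* the fixed "ng " prefix must fit */
        return -1;
    line += 3;
    len  -= 3;

    ptr = memchr(line, ' ', len); /* bounded search; strchr could scan past the buffer */
    if (ptr == NULL)
        return -1;

    *tok = line;
    *tok_len = (size_t)(ptr - line);
    return 0;
}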
lha_read_file_header_1
lha_read_file_header_1(struct archive_read *a, struct lha *lha) { const unsigned char *p; size_t extdsize; int i, err, err2; int namelen, padding; unsigned char headersum, sum_calculated; err = ARCHIVE_OK; if ((p = __archive_read_ahead(a, H1_FIXED_SIZE, NULL)) == NULL) return (truncated_error(a)); lha->header_size = p[H1_HEADER_SIZE_OFFSET] + 2; headersum = p[H1_HEADER_SUM_OFFSET]; /* Note: An extended header size is included in a compsize. */ lha->compsize = archive_le32dec(p + H1_COMP_SIZE_OFFSET); lha->origsize = archive_le32dec(p + H1_ORIG_SIZE_OFFSET); lha->mtime = lha_dos_time(p + H1_DOS_TIME_OFFSET); namelen = p[H1_NAME_LEN_OFFSET]; /* Calculate a padding size. The result will be normally 0 only(?) */ padding = ((int)lha->header_size) - H1_FIXED_SIZE - namelen; if (namelen > 230 || padding < 0) goto invalid; if ((p = __archive_read_ahead(a, lha->header_size, NULL)) == NULL) return (truncated_error(a)); for (i = 0; i < namelen; i++) { if (p[i + H1_FILE_NAME_OFFSET] == 0xff) goto invalid;/* Invalid filename. */ } archive_strncpy(&lha->filename, p + H1_FILE_NAME_OFFSET, namelen); lha->crc = archive_le16dec(p + H1_FILE_NAME_OFFSET + namelen); lha->setflag |= CRC_IS_SET; sum_calculated = lha_calcsum(0, p, 2, lha->header_size - 2); /* Consume used bytes but not include `next header size' data * since it will be consumed in lha_read_file_extended_header(). */ __archive_read_consume(a, lha->header_size - 2); /* Read extended headers */ err2 = lha_read_file_extended_header(a, lha, NULL, 2, (size_t)(lha->compsize + 2), &extdsize); if (err2 < ARCHIVE_WARN) return (err2); if (err2 < err) err = err2; /* Get a real compressed file size. */ lha->compsize -= extdsize - 2; if (sum_calculated != headersum) { archive_set_error(&a->archive, ARCHIVE_ERRNO_MISC, "LHa header sum error"); return (ARCHIVE_FATAL); } return (err); invalid: archive_set_error(&a->archive, ARCHIVE_ERRNO_FILE_FORMAT, "Invalid LHa header"); return (ARCHIVE_FATAL); }
lha_read_file_header_1(struct archive_read *a, struct lha *lha) { const unsigned char *p; size_t extdsize; int i, err, err2; int namelen, padding; unsigned char headersum, sum_calculated; err = ARCHIVE_OK; if ((p = __archive_read_ahead(a, H1_FIXED_SIZE, NULL)) == NULL) return (truncated_error(a)); lha->header_size = p[H1_HEADER_SIZE_OFFSET] + 2; headersum = p[H1_HEADER_SUM_OFFSET]; /* Note: An extended header size is included in a compsize. */ lha->compsize = archive_le32dec(p + H1_COMP_SIZE_OFFSET); lha->origsize = archive_le32dec(p + H1_ORIG_SIZE_OFFSET); lha->mtime = lha_dos_time(p + H1_DOS_TIME_OFFSET); namelen = p[H1_NAME_LEN_OFFSET]; /* Calculate a padding size. The result will be normally 0 only(?) */ padding = ((int)lha->header_size) - H1_FIXED_SIZE - namelen; if (namelen > 230 || padding < 0) goto invalid; if ((p = __archive_read_ahead(a, lha->header_size, NULL)) == NULL) return (truncated_error(a)); for (i = 0; i < namelen; i++) { if (p[i + H1_FILE_NAME_OFFSET] == 0xff) goto invalid;/* Invalid filename. */ } archive_strncpy(&lha->filename, p + H1_FILE_NAME_OFFSET, namelen); lha->crc = archive_le16dec(p + H1_FILE_NAME_OFFSET + namelen); lha->setflag |= CRC_IS_SET; sum_calculated = lha_calcsum(0, p, 2, lha->header_size - 2); /* Consume used bytes but not include `next header size' data * since it will be consumed in lha_read_file_extended_header(). */ __archive_read_consume(a, lha->header_size - 2); /* Read extended headers */ err2 = lha_read_file_extended_header(a, lha, NULL, 2, (size_t)(lha->compsize + 2), &extdsize); if (err2 < ARCHIVE_WARN) return (err2); if (err2 < err) err = err2; /* Get a real compressed file size. */ lha->compsize -= extdsize - 2; if (lha->compsize < 0) goto invalid; /* Invalid compressed file size */ if (sum_calculated != headersum) { archive_set_error(&a->archive, ARCHIVE_ERRNO_MISC, "LHa header sum error"); return (ARCHIVE_FATAL); } return (err); invalid: archive_set_error(&a->archive, ARCHIVE_ERRNO_FILE_FORMAT, "Invalid LHa header"); return (ARCHIVE_FATAL); }
{ "deleted": [], "added": [ { "line_no": 53, "char_start": 1755, "char_end": 1779, "line": "\tif (lha->compsize < 0)\n" }, { "line_no": 54, "char_start": 1779, "char_end": 1830, "line": "\t\tgoto invalid;\t/* Invalid compressed file size */\n" }, { "line_no": 55, "char_start": 1830, "char_end": 1831, "line": "\n" } ] }
{ "deleted": [], "added": [ { "char_start": 1760, "char_end": 1836, "chars": "lha->compsize < 0)\n\t\tgoto invalid;\t/* Invalid compressed file size */\n\n\tif (" } ] }
github.com/libarchive/libarchive/commit/98dcbbf0bf4854bf987557e55e55fff7abbf3ea9
libarchive/archive_read_support_format_lha.c
cwe-125
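The libarchive change above adds a single sanity check: after the extended-header size is subtracted from the recorded compressed size, a negative result means the header lies about its sizes and the entry is rejected. A sketch of that check in isolation, with hypothetical names:

#include <stdint.h>

static int remaining_compressed(int64_t compsize, int64_t extdsize,
                                int64_t *remaining)
{
    int64_t r = compsize - (extdsize - 2);  /* mirrors lha->compsize -= extdsize - 2 */
    if (r < 0)
        return -1;                          /* header sizes are inconsistent */
    *remaining = r;
    return 0;
}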
handle_eac3
static int handle_eac3(MOVMuxContext *mov, AVPacket *pkt, MOVTrack *track) { AC3HeaderInfo *hdr = NULL; struct eac3_info *info; int num_blocks, ret; if (!track->eac3_priv && !(track->eac3_priv = av_mallocz(sizeof(*info)))) return AVERROR(ENOMEM); info = track->eac3_priv; if (avpriv_ac3_parse_header(&hdr, pkt->data, pkt->size) < 0) { /* drop the packets until we see a good one */ if (!track->entry) { av_log(mov, AV_LOG_WARNING, "Dropping invalid packet from start of the stream\n"); ret = 0; } else ret = AVERROR_INVALIDDATA; goto end; } info->data_rate = FFMAX(info->data_rate, hdr->bit_rate / 1000); num_blocks = hdr->num_blocks; if (!info->ec3_done) { /* AC-3 substream must be the first one */ if (hdr->bitstream_id <= 10 && hdr->substreamid != 0) { ret = AVERROR(EINVAL); goto end; } /* this should always be the case, given that our AC-3 parser * concatenates dependent frames to their independent parent */ if (hdr->frame_type == EAC3_FRAME_TYPE_INDEPENDENT) { /* substream ids must be incremental */ if (hdr->substreamid > info->num_ind_sub + 1) { ret = AVERROR(EINVAL); goto end; } if (hdr->substreamid == info->num_ind_sub + 1) { //info->num_ind_sub++; avpriv_request_sample(track->par, "Multiple independent substreams"); ret = AVERROR_PATCHWELCOME; goto end; } else if (hdr->substreamid < info->num_ind_sub || hdr->substreamid == 0 && info->substream[0].bsid) { info->ec3_done = 1; goto concatenate; } } else { if (hdr->substreamid != 0) { avpriv_request_sample(mov->fc, "Multiple non EAC3 independent substreams"); ret = AVERROR_PATCHWELCOME; goto end; } } /* fill the info needed for the "dec3" atom */ info->substream[hdr->substreamid].fscod = hdr->sr_code; info->substream[hdr->substreamid].bsid = hdr->bitstream_id; info->substream[hdr->substreamid].bsmod = hdr->bitstream_mode; info->substream[hdr->substreamid].acmod = hdr->channel_mode; info->substream[hdr->substreamid].lfeon = hdr->lfe_on; /* Parse dependent substream(s), if any */ if (pkt->size != hdr->frame_size) { int cumul_size = hdr->frame_size; int parent = hdr->substreamid; while (cumul_size != pkt->size) { GetBitContext gbc; int i; ret = avpriv_ac3_parse_header(&hdr, pkt->data + cumul_size, pkt->size - cumul_size); if (ret < 0) goto end; if (hdr->frame_type != EAC3_FRAME_TYPE_DEPENDENT) { ret = AVERROR(EINVAL); goto end; } info->substream[parent].num_dep_sub++; ret /= 8; /* header is parsed up to lfeon, but custom channel map may be needed */ init_get_bits8(&gbc, pkt->data + cumul_size + ret, pkt->size - cumul_size - ret); /* skip bsid */ skip_bits(&gbc, 5); /* skip volume control params */ for (i = 0; i < (hdr->channel_mode ? 
1 : 2); i++) { skip_bits(&gbc, 5); // skip dialog normalization if (get_bits1(&gbc)) { skip_bits(&gbc, 8); // skip compression gain word } } /* get the dependent stream channel map, if exists */ if (get_bits1(&gbc)) info->substream[parent].chan_loc |= (get_bits(&gbc, 16) >> 5) & 0x1f; else info->substream[parent].chan_loc |= hdr->channel_mode; cumul_size += hdr->frame_size; } } } concatenate: if (!info->num_blocks && num_blocks == 6) { ret = pkt->size; goto end; } else if (info->num_blocks + num_blocks > 6) { ret = AVERROR_INVALIDDATA; goto end; } if (!info->num_blocks) { ret = av_packet_ref(&info->pkt, pkt); if (!ret) info->num_blocks = num_blocks; goto end; } else { if ((ret = av_grow_packet(&info->pkt, pkt->size)) < 0) goto end; memcpy(info->pkt.data + info->pkt.size - pkt->size, pkt->data, pkt->size); info->num_blocks += num_blocks; info->pkt.duration += pkt->duration; if ((ret = av_copy_packet_side_data(&info->pkt, pkt)) < 0) goto end; if (info->num_blocks != 6) goto end; av_packet_unref(pkt); av_packet_move_ref(pkt, &info->pkt); info->num_blocks = 0; } ret = pkt->size; end: av_free(hdr); return ret; }
static int handle_eac3(MOVMuxContext *mov, AVPacket *pkt, MOVTrack *track) { AC3HeaderInfo *hdr = NULL; struct eac3_info *info; int num_blocks, ret; if (!track->eac3_priv && !(track->eac3_priv = av_mallocz(sizeof(*info)))) return AVERROR(ENOMEM); info = track->eac3_priv; if (avpriv_ac3_parse_header(&hdr, pkt->data, pkt->size) < 0) { /* drop the packets until we see a good one */ if (!track->entry) { av_log(mov, AV_LOG_WARNING, "Dropping invalid packet from start of the stream\n"); ret = 0; } else ret = AVERROR_INVALIDDATA; goto end; } info->data_rate = FFMAX(info->data_rate, hdr->bit_rate / 1000); num_blocks = hdr->num_blocks; if (!info->ec3_done) { /* AC-3 substream must be the first one */ if (hdr->bitstream_id <= 10 && hdr->substreamid != 0) { ret = AVERROR(EINVAL); goto end; } /* this should always be the case, given that our AC-3 parser * concatenates dependent frames to their independent parent */ if (hdr->frame_type == EAC3_FRAME_TYPE_INDEPENDENT) { /* substream ids must be incremental */ if (hdr->substreamid > info->num_ind_sub + 1) { ret = AVERROR(EINVAL); goto end; } if (hdr->substreamid == info->num_ind_sub + 1) { //info->num_ind_sub++; avpriv_request_sample(mov->fc, "Multiple independent substreams"); ret = AVERROR_PATCHWELCOME; goto end; } else if (hdr->substreamid < info->num_ind_sub || hdr->substreamid == 0 && info->substream[0].bsid) { info->ec3_done = 1; goto concatenate; } } else { if (hdr->substreamid != 0) { avpriv_request_sample(mov->fc, "Multiple non EAC3 independent substreams"); ret = AVERROR_PATCHWELCOME; goto end; } } /* fill the info needed for the "dec3" atom */ info->substream[hdr->substreamid].fscod = hdr->sr_code; info->substream[hdr->substreamid].bsid = hdr->bitstream_id; info->substream[hdr->substreamid].bsmod = hdr->bitstream_mode; info->substream[hdr->substreamid].acmod = hdr->channel_mode; info->substream[hdr->substreamid].lfeon = hdr->lfe_on; /* Parse dependent substream(s), if any */ if (pkt->size != hdr->frame_size) { int cumul_size = hdr->frame_size; int parent = hdr->substreamid; while (cumul_size != pkt->size) { GetBitContext gbc; int i; ret = avpriv_ac3_parse_header(&hdr, pkt->data + cumul_size, pkt->size - cumul_size); if (ret < 0) goto end; if (hdr->frame_type != EAC3_FRAME_TYPE_DEPENDENT) { ret = AVERROR(EINVAL); goto end; } info->substream[parent].num_dep_sub++; ret /= 8; /* header is parsed up to lfeon, but custom channel map may be needed */ init_get_bits8(&gbc, pkt->data + cumul_size + ret, pkt->size - cumul_size - ret); /* skip bsid */ skip_bits(&gbc, 5); /* skip volume control params */ for (i = 0; i < (hdr->channel_mode ? 
1 : 2); i++) { skip_bits(&gbc, 5); // skip dialog normalization if (get_bits1(&gbc)) { skip_bits(&gbc, 8); // skip compression gain word } } /* get the dependent stream channel map, if exists */ if (get_bits1(&gbc)) info->substream[parent].chan_loc |= (get_bits(&gbc, 16) >> 5) & 0x1f; else info->substream[parent].chan_loc |= hdr->channel_mode; cumul_size += hdr->frame_size; } } } concatenate: if (!info->num_blocks && num_blocks == 6) { ret = pkt->size; goto end; } else if (info->num_blocks + num_blocks > 6) { ret = AVERROR_INVALIDDATA; goto end; } if (!info->num_blocks) { ret = av_packet_ref(&info->pkt, pkt); if (!ret) info->num_blocks = num_blocks; goto end; } else { if ((ret = av_grow_packet(&info->pkt, pkt->size)) < 0) goto end; memcpy(info->pkt.data + info->pkt.size - pkt->size, pkt->data, pkt->size); info->num_blocks += num_blocks; info->pkt.duration += pkt->duration; if ((ret = av_copy_packet_side_data(&info->pkt, pkt)) < 0) goto end; if (info->num_blocks != 6) goto end; av_packet_unref(pkt); av_packet_move_ref(pkt, &info->pkt); info->num_blocks = 0; } ret = pkt->size; end: av_free(hdr); return ret; }
{ "deleted": [ { "line_no": 42, "char_start": 1457, "char_end": 1543, "line": " avpriv_request_sample(track->par, \"Multiple independent substreams\");\n" } ], "added": [ { "line_no": 42, "char_start": 1457, "char_end": 1540, "line": " avpriv_request_sample(mov->fc, \"Multiple independent substreams\");\n" } ] }
{ "deleted": [ { "char_start": 1495, "char_end": 1500, "chars": "track" }, { "char_start": 1502, "char_end": 1505, "chars": "par" } ], "added": [ { "char_start": 1495, "char_end": 1498, "chars": "mov" }, { "char_start": 1500, "char_end": 1502, "chars": "fc" } ] }
github.com/FFmpeg/FFmpeg/commit/95556e27e2c1d56d9e18f5db34d6f756f3011148
libavformat/movenc.c
cwe-125
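The movenc.c fix is not a bounds check at all: the out-of-bounds read came from handing the wrong context object (track->par instead of mov->fc) to a logging helper that dereferences its argument as a logging context. A small sketch of why that class of mistake reads foreign memory, using hypothetical types rather than FFmpeg's:

#include <stdio.h>

struct log_class { const char *name; };
struct log_ctx   { const struct log_class *cls; };  /* what the helper expects */
struct codec_par { int width, height; };            /* unrelated type, also passed as void * */

static void log_warn(void *ctx, const char *msg)
{
    const struct log_ctx *c = ctx;           /* blindly trusts the first member */
    printf("[%s] %s\n", c->cls->name, msg);  /* wrong ctx type => reads through a bogus pointer */
}

Because the parameter is void *, passing a struct codec_par * compiles without complaint, which is exactly how the wrong argument slipped through in the record above.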
set_fdc
static void set_fdc(int drive) { if (drive >= 0 && drive < N_DRIVE) { fdc = FDC(drive); current_drive = drive; } if (fdc != 1 && fdc != 0) { pr_info("bad fdc value\n"); return; } set_dor(fdc, ~0, 8); #if N_FDC > 1 set_dor(1 - fdc, ~8, 0); #endif if (FDCS->rawcmd == 2) reset_fdc_info(1); if (fd_inb(FD_STATUS) != STATUS_READY) FDCS->reset = 1; }
static void set_fdc(int drive) { unsigned int new_fdc = fdc; if (drive >= 0 && drive < N_DRIVE) { new_fdc = FDC(drive); current_drive = drive; } if (new_fdc >= N_FDC) { pr_info("bad fdc value\n"); return; } fdc = new_fdc; set_dor(fdc, ~0, 8); #if N_FDC > 1 set_dor(1 - fdc, ~8, 0); #endif if (FDCS->rawcmd == 2) reset_fdc_info(1); if (fd_inb(FD_STATUS) != STATUS_READY) FDCS->reset = 1; }
{ "deleted": [ { "line_no": 4, "char_start": 71, "char_end": 91, "line": "\t\tfdc = FDC(drive);\n" }, { "line_no": 7, "char_start": 119, "char_end": 148, "line": "\tif (fdc != 1 && fdc != 0) {\n" } ], "added": [ { "line_no": 3, "char_start": 33, "char_end": 62, "line": "\tunsigned int new_fdc = fdc;\n" }, { "line_no": 4, "char_start": 62, "char_end": 63, "line": "\n" }, { "line_no": 6, "char_start": 101, "char_end": 125, "line": "\t\tnew_fdc = FDC(drive);\n" }, { "line_no": 9, "char_start": 153, "char_end": 178, "line": "\tif (new_fdc >= N_FDC) {\n" }, { "line_no": 13, "char_start": 221, "char_end": 237, "line": "\tfdc = new_fdc;\n" } ] }
{ "deleted": [ { "char_start": 128, "char_end": 129, "chars": "!" }, { "char_start": 131, "char_end": 144, "chars": "1 && fdc != 0" } ], "added": [ { "char_start": 34, "char_end": 64, "chars": "unsigned int new_fdc = fdc;\n\n\t" }, { "char_start": 103, "char_end": 107, "chars": "new_" }, { "char_start": 158, "char_end": 162, "chars": "new_" }, { "char_start": 166, "char_end": 167, "chars": ">" }, { "char_start": 169, "char_end": 174, "chars": "N_FDC" }, { "char_start": 220, "char_end": 236, "chars": "\n\tfdc = new_fdc;" } ] }
github.com/torvalds/linux/commit/2e90ca68b0d2f5548804f22f0dd61145516171e3
drivers/block/floppy.c
cwe-125
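The floppy-driver patch computes the new controller index into a local, bounds-checks it against N_FDC, and only then commits it to the global fdc, instead of range-checking the global after it may already hold a bad value. A sketch of the same ordering with stand-in names:

#define N_CTRL 2                     /* stand-in for N_FDC */

static int selected;                 /* global index used elsewhere as ctrl[selected] */

static int select_controller(int drive, int n_drive)
{
    int candidate = selected;

    if (drive >= 0 && drive < n_drive)
        candidate = drive / 2;       /* stand-in for FDC(drive) */

    if (candidate < 0 || candidate >= N_CTRL)
        return -1;                   /* reject before the global changes */

    selected = candidate;
    return 0;
}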
opmov
static int opmov(RAsm *a, ut8 *data, const Opcode *op) { int l = 0; st64 offset = 0; int mod = 0; int base = 0; int rex = 0; ut64 immediate = 0; if (op->operands[1].type & OT_CONSTANT) { if (!op->operands[1].is_good_flag) { return -1; } if (op->operands[1].immediate == -1) { return -1; } immediate = op->operands[1].immediate * op->operands[1].sign; if (op->operands[0].type & OT_GPREG && !(op->operands[0].type & OT_MEMORY)) { if (a->bits == 64 && ((op->operands[0].type & OT_QWORD) | (op->operands[1].type & OT_QWORD))) { if (!(op->operands[1].type & OT_CONSTANT) && op->operands[1].extended) { data[l++] = 0x49; } else { data[l++] = 0x48; } } else if (op->operands[0].extended) { data[l++] = 0x41; } if (op->operands[0].type & OT_WORD) { if (a->bits > 16) { data[l++] = 0x66; } } if (op->operands[0].type & OT_BYTE) { data[l++] = 0xb0 | op->operands[0].reg; data[l++] = immediate; } else { if (a->bits == 64 && ((op->operands[0].type & OT_QWORD) | (op->operands[1].type & OT_QWORD)) && immediate < UT32_MAX) { data[l++] = 0xc7; data[l++] = 0xc0 | op->operands[0].reg; } else { data[l++] = 0xb8 | op->operands[0].reg; } data[l++] = immediate; data[l++] = immediate >> 8; if (!(op->operands[0].type & OT_WORD)) { data[l++] = immediate >> 16; data[l++] = immediate >> 24; } if (a->bits == 64 && immediate > UT32_MAX) { data[l++] = immediate >> 32; data[l++] = immediate >> 40; data[l++] = immediate >> 48; data[l++] = immediate >> 56; } } } else if (op->operands[0].type & OT_MEMORY) { if (!op->operands[0].explicit_size) { if (op->operands[0].type & OT_GPREG) { ((Opcode *)op)->operands[0].dest_size = op->operands[0].reg_size; } else { return -1; } } int dest_bits = 8 * ((op->operands[0].dest_size & ALL_SIZE) >> OPSIZE_SHIFT); int reg_bits = 8 * ((op->operands[0].reg_size & ALL_SIZE) >> OPSIZE_SHIFT); int offset = op->operands[0].offset * op->operands[0].offset_sign; //addr_size_override prefix bool use_aso = false; if (reg_bits < a->bits) { use_aso = true; } //op_size_override prefix bool use_oso = false; if (dest_bits == 16) { use_oso = true; } bool rip_rel = op->operands[0].regs[0] == X86R_RIP; //rex prefix int rex = 1 << 6; bool use_rex = false; if (dest_bits == 64) { //W field use_rex = true; rex |= 1 << 3; } if (op->operands[0].extended) { //B field use_rex = true; rex |= 1; } //opcode selection int opcode; if (dest_bits == 8) { opcode = 0xc6; } else { opcode = 0xc7; } //modrm and SIB selection int modrm = 0; int mod; int reg = 0; int rm; bool use_sib = false; int sib; //mod if (offset == 0) { mod = 0; } else if (offset < 128 && offset > -129) { mod = 1; } else { mod = 2; } if (reg_bits == 16) { if (op->operands[0].regs[0] == X86R_BX && op->operands[0].regs[1] == X86R_SI) { rm = B0000; } else if (op->operands[0].regs[0] == X86R_BX && op->operands[0].regs[1] == X86R_DI) { rm = B0001; } else if (op->operands[0].regs[0] == X86R_BP && op->operands[0].regs[1] == X86R_SI) { rm = B0010; } else if (op->operands[0].regs[0] == X86R_BP && op->operands[0].regs[1] == X86R_DI) { rm = B0011; } else if (op->operands[0].regs[0] == X86R_SI && op->operands[0].regs[1] == -1) { rm = B0100; } else if (op->operands[0].regs[0] == X86R_DI && op->operands[0].regs[1] == -1) { rm = B0101; } else if (op->operands[0].regs[0] == X86R_BX && op->operands[0].regs[1] == -1) { rm = B0111; } else { //TODO allow for displacement only when parser is reworked return -1; } modrm = (mod << 6) | (reg << 3) | rm; } else { //rm if (op->operands[0].extended) { rm = op->operands[0].reg; } else { rm = op->operands[0].regs[0]; } //[epb] alone is illegal, 
so we need to fake a [ebp+0] if (rm == 5 && mod == 0) { mod = 1; } //sib int index = op->operands[0].regs[1]; int scale = getsib(op->operands[0].scale[1]); if (index != -1) { use_sib = true; sib = (scale << 6) | (index << 3) | rm; } else if (rm == 4) { use_sib = true; sib = 0x24; } if (use_sib) { rm = B0100; } if (rip_rel) { modrm = (B0000 << 6) | (reg << 3) | B0101; sib = (scale << 6) | (B0100 << 3) | B0101; } else { modrm = (mod << 6) | (reg << 3) | rm; } } //build the final result if (use_aso) { data[l++] = 0x67; } if (use_oso) { data[l++] = 0x66; } if (use_rex) { data[l++] = rex; } data[l++] = opcode; data[l++] = modrm; if (use_sib) { data[l++] = sib; } //offset if (mod == 1) { data[l++] = offset; } else if (reg_bits == 16 && mod == 2) { data[l++] = offset; data[l++] = offset >> 8; } else if (mod == 2 || rip_rel) { data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } //immediate int byte; for (byte = 0; byte < dest_bits && byte < 32; byte += 8) { data[l++] = (immediate >> byte); } } } else if (op->operands[1].type & OT_REGALL && !(op->operands[1].type & OT_MEMORY)) { if (op->operands[0].type & OT_CONSTANT) { return -1; } if (op->operands[0].type & OT_REGTYPE & OT_SEGMENTREG && op->operands[1].type & OT_REGTYPE & OT_SEGMENTREG) { return -1; } // Check reg sizes match if (op->operands[0].type & OT_REGTYPE && op->operands[1].type & OT_REGTYPE) { if (!((op->operands[0].type & ALL_SIZE) & (op->operands[1].type & ALL_SIZE))) { return -1; } } if (a->bits == 64) { if (op->operands[0].extended) { rex = 1; } if (op->operands[1].extended) { rex += 4; } if (op->operands[1].type & OT_QWORD) { if (!(op->operands[0].type & OT_QWORD)) { data[l++] = 0x67; data[l++] = 0x48; } } if (op->operands[1].type & OT_QWORD && op->operands[0].type & OT_QWORD) { data[l++] = 0x48 | rex; } if (op->operands[1].type & OT_DWORD && op->operands[0].type & OT_DWORD) { data[l++] = 0x40 | rex; } } else if (op->operands[0].extended && op->operands[1].extended) { data[l++] = 0x45; } offset = op->operands[0].offset * op->operands[0].offset_sign; if (op->operands[1].type & OT_REGTYPE & OT_SEGMENTREG) { data[l++] = 0x8c; } else { if (op->operands[0].type & OT_WORD) { data[l++] = 0x66; } data[l++] = (op->operands[0].type & OT_BYTE) ? 0x88 : 0x89; } if (op->operands[0].scale[0] > 1) { data[l++] = op->operands[1].reg << 3 | 4; data[l++] = getsib (op->operands[0].scale[0]) << 6 | op->operands[0].regs[0] << 3 | 5; data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; return l; } if (!(op->operands[0].type & OT_MEMORY)) { if (op->operands[0].reg == X86R_UNDEFINED || op->operands[1].reg == X86R_UNDEFINED) { return -1; } mod = 0x3; data[l++] = mod << 6 | op->operands[1].reg << 3 | op->operands[0].reg; } else if (op->operands[0].regs[0] == X86R_UNDEFINED) { data[l++] = op->operands[1].reg << 3 | 0x5; data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } else { if (op->operands[0].type & OT_MEMORY) { if (op->operands[0].regs[1] != X86R_UNDEFINED) { data[l++] = op->operands[1].reg << 3 | 0x4; data[l++] = op->operands[0].regs[1] << 3 | op->operands[0].regs[0]; return l; } if (offset) { mod = (offset > 128 || offset < -129) ? 
0x2 : 0x1; } if (op->operands[0].regs[0] == X86R_EBP) { mod = 0x2; } data[l++] = mod << 6 | op->operands[1].reg << 3 | op->operands[0].regs[0]; if (op->operands[0].regs[0] == X86R_ESP) { data[l++] = 0x24; } if (offset) { data[l++] = offset; } if (mod == 2) { // warning C4293: '>>': shift count negative or too big, undefined behavior data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } } } } else if (op->operands[1].type & OT_MEMORY) { if (op->operands[0].type & OT_MEMORY) { return -1; } offset = op->operands[1].offset * op->operands[1].offset_sign; if (op->operands[0].reg == X86R_EAX && op->operands[1].regs[0] == X86R_UNDEFINED) { if (a->bits == 64) { data[l++] = 0x48; } if (op->operands[0].type & OT_BYTE) { data[l++] = 0xa0; } else { data[l++] = 0xa1; } data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; if (a->bits == 64) { data[l++] = offset >> 32; data[l++] = offset >> 40; data[l++] = offset >> 48; data[l++] = offset >> 54; } return l; } if (op->operands[0].type & OT_BYTE && a->bits == 64 && op->operands[1].regs[0]) { if (op->operands[1].regs[0] >= X86R_R8 && op->operands[0].reg < 4) { data[l++] = 0x41; data[l++] = 0x8a; data[l++] = op->operands[0].reg << 3 | (op->operands[1].regs[0] - 8); return l; } return -1; } if (op->operands[1].type & OT_REGTYPE & OT_SEGMENTREG) { if (op->operands[1].scale[0] == 0) { return -1; } data[l++] = SEG_REG_PREFIXES[op->operands[1].regs[0]]; data[l++] = 0x8b; data[l++] = op->operands[0].reg << 3 | 0x5; data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; return l; } if (a->bits == 64) { if (op->operands[0].type & OT_QWORD) { if (!(op->operands[1].type & OT_QWORD)) { if (op->operands[1].regs[0] != -1) { data[l++] = 0x67; } data[l++] = 0x48; } } else if (op->operands[1].type & OT_DWORD) { data[l++] = 0x44; } else if (!(op->operands[1].type & OT_QWORD)) { data[l++] = 0x67; } if (op->operands[1].type & OT_QWORD && op->operands[0].type & OT_QWORD) { data[l++] = 0x48; } } if (op->operands[0].type & OT_WORD) { data[l++] = 0x66; data[l++] = op->operands[1].type & OT_BYTE ? 0x8a : 0x8b; } else { data[l++] = (op->operands[1].type & OT_BYTE || op->operands[0].type & OT_BYTE) ? 
0x8a : 0x8b; } if (op->operands[1].regs[0] == X86R_UNDEFINED) { if (a->bits == 64) { data[l++] = op->operands[0].reg << 3 | 0x4; data[l++] = 0x25; } else { data[l++] = op->operands[0].reg << 3 | 0x5; } data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } else { if (op->operands[1].scale[0] > 1) { data[l++] = op->operands[0].reg << 3 | 4; if (op->operands[1].scale[0] >= 2) { base = 5; } if (base) { data[l++] = getsib (op->operands[1].scale[0]) << 6 | op->operands[1].regs[0] << 3 | base; } else { data[l++] = getsib (op->operands[1].scale[0]) << 3 | op->operands[1].regs[0]; } if (offset || base) { data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } return l; } if (op->operands[1].regs[1] != X86R_UNDEFINED) { data[l++] = op->operands[0].reg << 3 | 0x4; data[l++] = op->operands[1].regs[1] << 3 | op->operands[1].regs[0]; return l; } if (offset || op->operands[1].regs[0] == X86R_EBP) { mod = 0x2; if (op->operands[1].offset > 127) { mod = 0x4; } } if (a->bits == 64 && offset && op->operands[0].type & OT_QWORD) { if (op->operands[1].regs[0] == X86R_RIP) { data[l++] = 0x5; } else { if (op->operands[1].offset > 127) { data[l++] = 0x80 | op->operands[0].reg << 3 | op->operands[1].regs[0]; } else { data[l++] = 0x40 | op->operands[1].regs[0]; } } if (op->operands[1].offset > 127) { mod = 0x1; } } else { if (op->operands[1].regs[0] == X86R_EIP && (op->operands[0].type & OT_DWORD)) { data[l++] = 0x0d; } else if (op->operands[1].regs[0] == X86R_RIP && (op->operands[0].type & OT_QWORD)) { data[l++] = 0x05; } else { data[l++] = mod << 5 | op->operands[0].reg << 3 | op->operands[1].regs[0]; } } if (op->operands[1].regs[0] == X86R_ESP) { data[l++] = 0x24; } if (mod >= 0x2) { data[l++] = offset; if (op->operands[1].offset > 128 || op->operands[1].regs[0] == X86R_EIP) { data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } } else if (a->bits == 64 && (offset || op->operands[1].regs[0] == X86R_RIP)) { data[l++] = offset; if (op->operands[1].offset > 127 || op->operands[1].regs[0] == X86R_RIP) { data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } } } } return l; }
static int opmov(RAsm *a, ut8 *data, const Opcode *op) { int l = 0; st64 offset = 0; int mod = 0; int base = 0; int rex = 0; ut64 immediate = 0; if (op->operands[1].type & OT_CONSTANT) { if (!op->operands[1].is_good_flag) { return -1; } if (op->operands[1].immediate == -1) { return -1; } immediate = op->operands[1].immediate * op->operands[1].sign; if (op->operands[0].type & OT_GPREG && !(op->operands[0].type & OT_MEMORY)) { if (a->bits == 64 && ((op->operands[0].type & OT_QWORD) | (op->operands[1].type & OT_QWORD))) { if (!(op->operands[1].type & OT_CONSTANT) && op->operands[1].extended) { data[l++] = 0x49; } else { data[l++] = 0x48; } } else if (op->operands[0].extended) { data[l++] = 0x41; } if (op->operands[0].type & OT_WORD) { if (a->bits > 16) { data[l++] = 0x66; } } if (op->operands[0].type & OT_BYTE) { data[l++] = 0xb0 | op->operands[0].reg; data[l++] = immediate; } else { if (a->bits == 64 && ((op->operands[0].type & OT_QWORD) | (op->operands[1].type & OT_QWORD)) && immediate < UT32_MAX) { data[l++] = 0xc7; data[l++] = 0xc0 | op->operands[0].reg; } else { data[l++] = 0xb8 | op->operands[0].reg; } data[l++] = immediate; data[l++] = immediate >> 8; if (!(op->operands[0].type & OT_WORD)) { data[l++] = immediate >> 16; data[l++] = immediate >> 24; } if (a->bits == 64 && immediate > UT32_MAX) { data[l++] = immediate >> 32; data[l++] = immediate >> 40; data[l++] = immediate >> 48; data[l++] = immediate >> 56; } } } else if (op->operands[0].type & OT_MEMORY) { if (!op->operands[0].explicit_size) { if (op->operands[0].type & OT_GPREG) { ((Opcode *)op)->operands[0].dest_size = op->operands[0].reg_size; } else { return -1; } } int dest_bits = 8 * ((op->operands[0].dest_size & ALL_SIZE) >> OPSIZE_SHIFT); int reg_bits = 8 * ((op->operands[0].reg_size & ALL_SIZE) >> OPSIZE_SHIFT); int offset = op->operands[0].offset * op->operands[0].offset_sign; //addr_size_override prefix bool use_aso = false; if (reg_bits < a->bits) { use_aso = true; } //op_size_override prefix bool use_oso = false; if (dest_bits == 16) { use_oso = true; } bool rip_rel = op->operands[0].regs[0] == X86R_RIP; //rex prefix int rex = 1 << 6; bool use_rex = false; if (dest_bits == 64) { //W field use_rex = true; rex |= 1 << 3; } if (op->operands[0].extended) { //B field use_rex = true; rex |= 1; } //opcode selection int opcode; if (dest_bits == 8) { opcode = 0xc6; } else { opcode = 0xc7; } //modrm and SIB selection int modrm = 0; int mod; int reg = 0; int rm; bool use_sib = false; int sib; //mod if (offset == 0) { mod = 0; } else if (offset < 128 && offset > -129) { mod = 1; } else { mod = 2; } if (reg_bits == 16) { if (op->operands[0].regs[0] == X86R_BX && op->operands[0].regs[1] == X86R_SI) { rm = B0000; } else if (op->operands[0].regs[0] == X86R_BX && op->operands[0].regs[1] == X86R_DI) { rm = B0001; } else if (op->operands[0].regs[0] == X86R_BP && op->operands[0].regs[1] == X86R_SI) { rm = B0010; } else if (op->operands[0].regs[0] == X86R_BP && op->operands[0].regs[1] == X86R_DI) { rm = B0011; } else if (op->operands[0].regs[0] == X86R_SI && op->operands[0].regs[1] == -1) { rm = B0100; } else if (op->operands[0].regs[0] == X86R_DI && op->operands[0].regs[1] == -1) { rm = B0101; } else if (op->operands[0].regs[0] == X86R_BX && op->operands[0].regs[1] == -1) { rm = B0111; } else { //TODO allow for displacement only when parser is reworked return -1; } modrm = (mod << 6) | (reg << 3) | rm; } else { //rm if (op->operands[0].extended) { rm = op->operands[0].reg; } else { rm = op->operands[0].regs[0]; } //[epb] alone is illegal, 
so we need to fake a [ebp+0] if (rm == 5 && mod == 0) { mod = 1; } //sib int index = op->operands[0].regs[1]; int scale = getsib(op->operands[0].scale[1]); if (index != -1) { use_sib = true; sib = (scale << 6) | (index << 3) | rm; } else if (rm == 4) { use_sib = true; sib = 0x24; } if (use_sib) { rm = B0100; } if (rip_rel) { modrm = (B0000 << 6) | (reg << 3) | B0101; sib = (scale << 6) | (B0100 << 3) | B0101; } else { modrm = (mod << 6) | (reg << 3) | rm; } } //build the final result if (use_aso) { data[l++] = 0x67; } if (use_oso) { data[l++] = 0x66; } if (use_rex) { data[l++] = rex; } data[l++] = opcode; data[l++] = modrm; if (use_sib) { data[l++] = sib; } //offset if (mod == 1) { data[l++] = offset; } else if (reg_bits == 16 && mod == 2) { data[l++] = offset; data[l++] = offset >> 8; } else if (mod == 2 || rip_rel) { data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } //immediate int byte; for (byte = 0; byte < dest_bits && byte < 32; byte += 8) { data[l++] = (immediate >> byte); } } } else if (op->operands[1].type & OT_REGALL && !(op->operands[1].type & OT_MEMORY)) { if (op->operands[0].type & OT_CONSTANT) { return -1; } if (op->operands[0].type & OT_REGTYPE & OT_SEGMENTREG && op->operands[1].type & OT_REGTYPE & OT_SEGMENTREG) { return -1; } // Check reg sizes match if (op->operands[0].type & OT_REGTYPE && op->operands[1].type & OT_REGTYPE) { if (!((op->operands[0].type & ALL_SIZE) & (op->operands[1].type & ALL_SIZE))) { return -1; } } if (a->bits == 64) { if (op->operands[0].extended) { rex = 1; } if (op->operands[1].extended) { rex += 4; } if (op->operands[1].type & OT_QWORD) { if (!(op->operands[0].type & OT_QWORD)) { data[l++] = 0x67; data[l++] = 0x48; } } if (op->operands[1].type & OT_QWORD && op->operands[0].type & OT_QWORD) { data[l++] = 0x48 | rex; } if (op->operands[1].type & OT_DWORD && op->operands[0].type & OT_DWORD) { data[l++] = 0x40 | rex; } } else if (op->operands[0].extended && op->operands[1].extended) { data[l++] = 0x45; } offset = op->operands[0].offset * op->operands[0].offset_sign; if (op->operands[1].type & OT_REGTYPE & OT_SEGMENTREG) { data[l++] = 0x8c; } else { if (op->operands[0].type & OT_WORD) { data[l++] = 0x66; } data[l++] = (op->operands[0].type & OT_BYTE) ? 0x88 : 0x89; } if (op->operands[0].scale[0] > 1) { data[l++] = op->operands[1].reg << 3 | 4; data[l++] = getsib (op->operands[0].scale[0]) << 6 | op->operands[0].regs[0] << 3 | 5; data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; return l; } if (!(op->operands[0].type & OT_MEMORY)) { if (op->operands[0].reg == X86R_UNDEFINED || op->operands[1].reg == X86R_UNDEFINED) { return -1; } mod = 0x3; data[l++] = mod << 6 | op->operands[1].reg << 3 | op->operands[0].reg; } else if (op->operands[0].regs[0] == X86R_UNDEFINED) { data[l++] = op->operands[1].reg << 3 | 0x5; data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } else { if (op->operands[0].type & OT_MEMORY) { if (op->operands[0].regs[1] != X86R_UNDEFINED) { data[l++] = op->operands[1].reg << 3 | 0x4; data[l++] = op->operands[0].regs[1] << 3 | op->operands[0].regs[0]; return l; } if (offset) { mod = (offset > 128 || offset < -129) ? 
0x2 : 0x1; } if (op->operands[0].regs[0] == X86R_EBP) { mod = 0x2; } data[l++] = mod << 6 | op->operands[1].reg << 3 | op->operands[0].regs[0]; if (op->operands[0].regs[0] == X86R_ESP) { data[l++] = 0x24; } if (offset) { data[l++] = offset; } if (mod == 2) { // warning C4293: '>>': shift count negative or too big, undefined behavior data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } } } } else if (op->operands[1].type & OT_MEMORY) { if (op->operands[0].type & OT_MEMORY) { return -1; } offset = op->operands[1].offset * op->operands[1].offset_sign; if (op->operands[0].reg == X86R_EAX && op->operands[1].regs[0] == X86R_UNDEFINED) { if (a->bits == 64) { data[l++] = 0x48; } if (op->operands[0].type & OT_BYTE) { data[l++] = 0xa0; } else { data[l++] = 0xa1; } data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; if (a->bits == 64) { data[l++] = offset >> 32; data[l++] = offset >> 40; data[l++] = offset >> 48; data[l++] = offset >> 54; } return l; } if (op->operands[0].type & OT_BYTE && a->bits == 64 && op->operands[1].regs[0]) { if (op->operands[1].regs[0] >= X86R_R8 && op->operands[0].reg < 4) { data[l++] = 0x41; data[l++] = 0x8a; data[l++] = op->operands[0].reg << 3 | (op->operands[1].regs[0] - 8); return l; } return -1; } if (op->operands[1].type & OT_REGTYPE & OT_SEGMENTREG) { if (op->operands[1].scale[0] == 0) { return -1; } data[l++] = SEG_REG_PREFIXES[op->operands[1].regs[0] % 6]; data[l++] = 0x8b; data[l++] = (((ut32)op->operands[0].reg) << 3) | 0x5; data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; return l; } if (a->bits == 64) { if (op->operands[0].type & OT_QWORD) { if (!(op->operands[1].type & OT_QWORD)) { if (op->operands[1].regs[0] != -1) { data[l++] = 0x67; } data[l++] = 0x48; } } else if (op->operands[1].type & OT_DWORD) { data[l++] = 0x44; } else if (!(op->operands[1].type & OT_QWORD)) { data[l++] = 0x67; } if (op->operands[1].type & OT_QWORD && op->operands[0].type & OT_QWORD) { data[l++] = 0x48; } } if (op->operands[0].type & OT_WORD) { data[l++] = 0x66; data[l++] = op->operands[1].type & OT_BYTE ? 0x8a : 0x8b; } else { data[l++] = (op->operands[1].type & OT_BYTE || op->operands[0].type & OT_BYTE) ? 
0x8a : 0x8b; } if (op->operands[1].regs[0] == X86R_UNDEFINED) { if (a->bits == 64) { data[l++] = op->operands[0].reg << 3 | 0x4; data[l++] = 0x25; } else { data[l++] = op->operands[0].reg << 3 | 0x5; } data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } else { if (op->operands[1].scale[0] > 1) { data[l++] = op->operands[0].reg << 3 | 4; if (op->operands[1].scale[0] >= 2) { base = 5; } if (base) { data[l++] = getsib (op->operands[1].scale[0]) << 6 | op->operands[1].regs[0] << 3 | base; } else { data[l++] = getsib (op->operands[1].scale[0]) << 3 | op->operands[1].regs[0]; } if (offset || base) { data[l++] = offset; data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } return l; } if (op->operands[1].regs[1] != X86R_UNDEFINED) { data[l++] = op->operands[0].reg << 3 | 0x4; data[l++] = op->operands[1].regs[1] << 3 | op->operands[1].regs[0]; return l; } if (offset || op->operands[1].regs[0] == X86R_EBP) { mod = 0x2; if (op->operands[1].offset > 127) { mod = 0x4; } } if (a->bits == 64 && offset && op->operands[0].type & OT_QWORD) { if (op->operands[1].regs[0] == X86R_RIP) { data[l++] = 0x5; } else { if (op->operands[1].offset > 127) { data[l++] = 0x80 | op->operands[0].reg << 3 | op->operands[1].regs[0]; } else { data[l++] = 0x40 | op->operands[1].regs[0]; } } if (op->operands[1].offset > 127) { mod = 0x1; } } else { if (op->operands[1].regs[0] == X86R_EIP && (op->operands[0].type & OT_DWORD)) { data[l++] = 0x0d; } else if (op->operands[1].regs[0] == X86R_RIP && (op->operands[0].type & OT_QWORD)) { data[l++] = 0x05; } else { data[l++] = mod << 5 | op->operands[0].reg << 3 | op->operands[1].regs[0]; } } if (op->operands[1].regs[0] == X86R_ESP) { data[l++] = 0x24; } if (mod >= 0x2) { data[l++] = offset; if (op->operands[1].offset > 128 || op->operands[1].regs[0] == X86R_EIP) { data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } } else if (a->bits == 64 && (offset || op->operands[1].regs[0] == X86R_RIP)) { data[l++] = offset; if (op->operands[1].offset > 127 || op->operands[1].regs[0] == X86R_RIP) { data[l++] = offset >> 8; data[l++] = offset >> 16; data[l++] = offset >> 24; } } } } return l; }
{ "deleted": [ { "line_no": 352, "char_start": 9435, "char_end": 9493, "line": "\t\t\tdata[l++] = SEG_REG_PREFIXES[op->operands[1].regs[0]];\n" }, { "line_no": 354, "char_start": 9514, "char_end": 9561, "line": "\t\t\tdata[l++] = op->operands[0].reg << 3 | 0x5;\n" } ], "added": [ { "line_no": 352, "char_start": 9435, "char_end": 9497, "line": "\t\t\tdata[l++] = SEG_REG_PREFIXES[op->operands[1].regs[0] % 6];\n" }, { "line_no": 354, "char_start": 9518, "char_end": 9575, "line": "\t\t\tdata[l++] = (((ut32)op->operands[0].reg) << 3) | 0x5;\n" } ] }
{ "deleted": [], "added": [ { "char_start": 9490, "char_end": 9494, "chars": " % 6" }, { "char_start": 9533, "char_end": 9541, "chars": "(((ut32)" }, { "char_start": 9560, "char_end": 9561, "chars": ")" }, { "char_start": 9566, "char_end": 9567, "chars": ")" } ] }
github.com/radare/radare2/commit/f17bfd9f1da05f30f23a4dd05e9d2363e1406948
libr/asm/p/asm_x86_nz.c
cwe-125
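Both hunks in the radare2 diff clamp data before it is used: the parsed segment-register number is reduced modulo the size of SEG_REG_PREFIXES, and the register field is widened to an unsigned type before shifting. A sketch of the table-clamping half; the table contents here are ordinary x86 segment-override prefixes used as a stand-in, not radare2's array:

#include <stdint.h>

static const uint8_t seg_prefix[6] = { 0x26, 0x2e, 0x36, 0x3e, 0x64, 0x65 };

static uint8_t seg_prefix_for(int reg)
{
    return seg_prefix[(unsigned)reg % 6];   /* index always lands in [0,5] */
}

Rejecting an out-of-range register outright would also work; the modulo simply mirrors the choice made upstream.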
ares_parse_a_reply
int ares_parse_a_reply(const unsigned char *abuf, int alen, struct hostent **host) { unsigned int qdcount, ancount; int status, i, rr_type, rr_class, rr_len, naddrs; long int len; int naliases; const unsigned char *aptr; char *hostname, *rr_name, *rr_data, **aliases; struct in_addr *addrs; struct hostent *hostent; /* Set *host to NULL for all failure cases. */ *host = NULL; /* Give up if abuf doesn't have room for a header. */ if (alen < HFIXEDSZ) return ARES_EBADRESP; /* Fetch the question and answer count from the header. */ qdcount = DNS_HEADER_QDCOUNT(abuf); ancount = DNS_HEADER_ANCOUNT(abuf); if (qdcount != 1) return ARES_EBADRESP; /* Expand the name from the question, and skip past the question. */ aptr = abuf + HFIXEDSZ; status = ares_expand_name(aptr, abuf, alen, &hostname, &len); if (status != ARES_SUCCESS) return status; if (aptr + len + QFIXEDSZ > abuf + alen) { free(hostname); return ARES_EBADRESP; } aptr += len + QFIXEDSZ; /* Allocate addresses and aliases; ancount gives an upper bound for both. */ addrs = malloc(ancount * sizeof(struct in_addr)); if (!addrs) { free(hostname); return ARES_ENOMEM; } aliases = malloc((ancount + 1) * sizeof(char *)); if (!aliases) { free(hostname); free(addrs); return ARES_ENOMEM; } naddrs = 0; naliases = 0; /* Examine each answer resource record (RR) in turn. */ for (i = 0; i < (int)ancount; i++) { /* Decode the RR up to the data field. */ status = ares_expand_name(aptr, abuf, alen, &rr_name, &len); if (status != ARES_SUCCESS) break; aptr += len; if (aptr + RRFIXEDSZ > abuf + alen) { free(rr_name); status = ARES_EBADRESP; break; } rr_type = DNS_RR_TYPE(aptr); rr_class = DNS_RR_CLASS(aptr); rr_len = DNS_RR_LEN(aptr); aptr += RRFIXEDSZ; if (rr_class == C_IN && rr_type == T_A && rr_len == sizeof(struct in_addr) && strcasecmp(rr_name, hostname) == 0) { memcpy(&addrs[naddrs], aptr, sizeof(struct in_addr)); naddrs++; status = ARES_SUCCESS; } if (rr_class == C_IN && rr_type == T_CNAME) { /* Record the RR name as an alias. */ aliases[naliases] = rr_name; naliases++; /* Decode the RR data and replace the hostname with it. */ status = ares_expand_name(aptr, abuf, alen, &rr_data, &len); if (status != ARES_SUCCESS) break; free(hostname); hostname = rr_data; } else free(rr_name); aptr += rr_len; if (aptr > abuf + alen) { status = ARES_EBADRESP; break; } } if (status == ARES_SUCCESS && naddrs == 0) status = ARES_ENODATA; if (status == ARES_SUCCESS) { /* We got our answer. Allocate memory to build the host entry. */ aliases[naliases] = NULL; hostent = malloc(sizeof(struct hostent)); if (hostent) { hostent->h_addr_list = malloc((naddrs + 1) * sizeof(char *)); if (hostent->h_addr_list) { /* Fill in the hostent and return successfully. */ hostent->h_name = hostname; hostent->h_aliases = aliases; hostent->h_addrtype = AF_INET; hostent->h_length = sizeof(struct in_addr); for (i = 0; i < naddrs; i++) hostent->h_addr_list[i] = (char *) &addrs[i]; hostent->h_addr_list[naddrs] = NULL; *host = hostent; return ARES_SUCCESS; } free(hostent); } status = ARES_ENOMEM; } for (i = 0; i < naliases; i++) free(aliases[i]); free(aliases); free(addrs); free(hostname); return status; }
int ares_parse_a_reply(const unsigned char *abuf, int alen, struct hostent **host) { unsigned int qdcount, ancount; int status, i, rr_type, rr_class, rr_len, naddrs; long int len; int naliases; const unsigned char *aptr; char *hostname, *rr_name, *rr_data, **aliases; struct in_addr *addrs; struct hostent *hostent; /* Set *host to NULL for all failure cases. */ *host = NULL; /* Give up if abuf doesn't have room for a header. */ if (alen < HFIXEDSZ) return ARES_EBADRESP; /* Fetch the question and answer count from the header. */ qdcount = DNS_HEADER_QDCOUNT(abuf); ancount = DNS_HEADER_ANCOUNT(abuf); if (qdcount != 1) return ARES_EBADRESP; /* Expand the name from the question, and skip past the question. */ aptr = abuf + HFIXEDSZ; status = ares_expand_name(aptr, abuf, alen, &hostname, &len); if (status != ARES_SUCCESS) return status; if (aptr + len + QFIXEDSZ > abuf + alen) { free(hostname); return ARES_EBADRESP; } aptr += len + QFIXEDSZ; /* Allocate addresses and aliases; ancount gives an upper bound for both. */ addrs = malloc(ancount * sizeof(struct in_addr)); if (!addrs) { free(hostname); return ARES_ENOMEM; } aliases = malloc((ancount + 1) * sizeof(char *)); if (!aliases) { free(hostname); free(addrs); return ARES_ENOMEM; } naddrs = 0; naliases = 0; /* Examine each answer resource record (RR) in turn. */ for (i = 0; i < (int)ancount; i++) { /* Decode the RR up to the data field. */ status = ares_expand_name(aptr, abuf, alen, &rr_name, &len); if (status != ARES_SUCCESS) break; aptr += len; if (aptr + RRFIXEDSZ > abuf + alen) { free(rr_name); status = ARES_EBADRESP; break; } rr_type = DNS_RR_TYPE(aptr); rr_class = DNS_RR_CLASS(aptr); rr_len = DNS_RR_LEN(aptr); aptr += RRFIXEDSZ; if (aptr + rr_len > abuf + alen) { free(rr_name); status = ARES_EBADRESP; break; } if (rr_class == C_IN && rr_type == T_A && rr_len == sizeof(struct in_addr) && strcasecmp(rr_name, hostname) == 0) { memcpy(&addrs[naddrs], aptr, sizeof(struct in_addr)); naddrs++; status = ARES_SUCCESS; } if (rr_class == C_IN && rr_type == T_CNAME) { /* Record the RR name as an alias. */ aliases[naliases] = rr_name; naliases++; /* Decode the RR data and replace the hostname with it. */ status = ares_expand_name(aptr, abuf, alen, &rr_data, &len); if (status != ARES_SUCCESS) break; free(hostname); hostname = rr_data; } else free(rr_name); aptr += rr_len; if (aptr > abuf + alen) { status = ARES_EBADRESP; break; } } if (status == ARES_SUCCESS && naddrs == 0) status = ARES_ENODATA; if (status == ARES_SUCCESS) { /* We got our answer. Allocate memory to build the host entry. */ aliases[naliases] = NULL; hostent = malloc(sizeof(struct hostent)); if (hostent) { hostent->h_addr_list = malloc((naddrs + 1) * sizeof(char *)); if (hostent->h_addr_list) { /* Fill in the hostent and return successfully. */ hostent->h_name = hostname; hostent->h_aliases = aliases; hostent->h_addrtype = AF_INET; hostent->h_length = sizeof(struct in_addr); for (i = 0; i < naddrs; i++) hostent->h_addr_list[i] = (char *) &addrs[i]; hostent->h_addr_list[naddrs] = NULL; *host = hostent; return ARES_SUCCESS; } free(hostent); } status = ARES_ENOMEM; } for (i = 0; i < naliases; i++) free(aliases[i]); free(aliases); free(addrs); free(hostname); return status; }
{ "deleted": [], "added": [ { "line_no": 73, "char_start": 1933, "char_end": 1972, "line": " if (aptr + rr_len > abuf + alen)\n" }, { "line_no": 74, "char_start": 1972, "char_end": 1975, "line": "\t{\n" }, { "line_no": 75, "char_start": 1975, "char_end": 1993, "line": "\t free(rr_name);\n" }, { "line_no": 76, "char_start": 1993, "char_end": 2020, "line": "\t status = ARES_EBADRESP;\n" }, { "line_no": 77, "char_start": 2020, "char_end": 2030, "line": "\t break;\n" }, { "line_no": 78, "char_start": 2030, "char_end": 2033, "line": "\t}\n" } ] }
{ "deleted": [], "added": [ { "char_start": 1933, "char_end": 2033, "chars": " if (aptr + rr_len > abuf + alen)\n\t{\n\t free(rr_name);\n\t status = ARES_EBADRESP;\n\t break;\n\t}\n" } ] }
github.com/resiprocate/resiprocate/commit/d67a9ca6fd06ca65d23e313bdbad1ef4dd3aa0df
rutil/dns/ares/ares_parse_a_reply.c
cwe-125
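The c-ares patch inserts one guard in the RR loop: before any of the rr_len bytes after the fixed RR header are touched, it verifies that aptr + rr_len still lies inside the reply. A sketch of that guard as a helper, with hypothetical names:

#include <stddef.h>
#include <string.h>

/* assumes aptr already points inside abuf[0 .. alen] */
static int read_rr_data(const unsigned char *abuf, size_t alen,
                        const unsigned char *aptr, size_t rr_len,
                        unsigned char *out, size_t out_cap)
{
    if ((size_t)(abuf + alen - aptr) < rr_len || rr_len > out_cap)
        return -1;                 /* record claims more bytes than remain */
    memcpy(out, aptr, rr_len);
    return 0;
}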
ExtractPostscript
static Image *ExtractPostscript(Image *image,const ImageInfo *image_info, MagickOffsetType PS_Offset,ssize_t PS_Size,ExceptionInfo *exception) { char postscript_file[MaxTextExtent]; const MagicInfo *magic_info; FILE *ps_file; ImageInfo *clone_info; Image *image2; unsigned char magick[2*MaxTextExtent]; if ((clone_info=CloneImageInfo(image_info)) == NULL) return(image); clone_info->blob=(void *) NULL; clone_info->length=0; /* Obtain temporary file */ (void) AcquireUniqueFilename(postscript_file); ps_file=fopen_utf8(postscript_file,"wb"); if (ps_file == (FILE *) NULL) goto FINISH; /* Copy postscript to temporary file */ (void) SeekBlob(image,PS_Offset,SEEK_SET); (void) ReadBlob(image, 2*MaxTextExtent, magick); (void) SeekBlob(image,PS_Offset,SEEK_SET); while(PS_Size-- > 0) { (void) fputc(ReadBlobByte(image),ps_file); } (void) fclose(ps_file); /* Detect file format - Check magic.mgk configuration file. */ magic_info=GetMagicInfo(magick,2*MaxTextExtent,exception); if(magic_info == (const MagicInfo *) NULL) goto FINISH_UNL; /* printf("Detected:%s \n",magic_info->name); */ if(exception->severity != UndefinedException) goto FINISH_UNL; if(magic_info->name == (char *) NULL) goto FINISH_UNL; (void) CopyMagickMemory(clone_info->magick,magic_info->name,MaxTextExtent); /* Read nested image */ /*FormatString(clone_info->filename,"%s:%s",magic_info->name,postscript_file);*/ FormatLocaleString(clone_info->filename,MaxTextExtent,"%s",postscript_file); image2=ReadImage(clone_info,exception); if (!image2) goto FINISH_UNL; /* Replace current image with new image while copying base image attributes. */ (void) CopyMagickMemory(image2->filename,image->filename,MaxTextExtent); (void) CopyMagickMemory(image2->magick_filename,image->magick_filename,MaxTextExtent); (void) CopyMagickMemory(image2->magick,image->magick,MaxTextExtent); image2->depth=image->depth; DestroyBlob(image2); image2->blob=ReferenceBlob(image->blob); if ((image->rows == 0) || (image->columns == 0)) DeleteImageFromList(&image); AppendImageToList(&image,image2); FINISH_UNL: (void) RelinquishUniqueFileResource(postscript_file); FINISH: DestroyImageInfo(clone_info); return(image); }
static Image *ExtractPostscript(Image *image,const ImageInfo *image_info, MagickOffsetType PS_Offset,ssize_t PS_Size,ExceptionInfo *exception) { char postscript_file[MaxTextExtent]; const MagicInfo *magic_info; FILE *ps_file; ImageInfo *clone_info; Image *image2; unsigned char magick[2*MaxTextExtent]; if ((clone_info=CloneImageInfo(image_info)) == NULL) return(image); clone_info->blob=(void *) NULL; clone_info->length=0; /* Obtain temporary file */ (void) AcquireUniqueFilename(postscript_file); ps_file=fopen_utf8(postscript_file,"wb"); if (ps_file == (FILE *) NULL) goto FINISH; /* Copy postscript to temporary file */ (void) SeekBlob(image,PS_Offset,SEEK_SET); (void) ReadBlob(image, 2*MaxTextExtent, magick); (void) SeekBlob(image,PS_Offset,SEEK_SET); while(PS_Size-- > 0) { (void) fputc(ReadBlobByte(image),ps_file); } (void) fclose(ps_file); /* Detect file format - Check magic.mgk configuration file. */ magic_info=GetMagicInfo(magick,2*MaxTextExtent,exception); if(magic_info == (const MagicInfo *) NULL) goto FINISH_UNL; /* printf("Detected:%s \n",magic_info->name); */ if(exception->severity != UndefinedException) goto FINISH_UNL; if(magic_info->name == (char *) NULL) goto FINISH_UNL; (void) strncpy(clone_info->magick,magic_info->name,MaxTextExtent); /* Read nested image */ /*FormatString(clone_info->filename,"%s:%s",magic_info->name,postscript_file);*/ FormatLocaleString(clone_info->filename,MaxTextExtent,"%s",postscript_file); image2=ReadImage(clone_info,exception); if (!image2) goto FINISH_UNL; /* Replace current image with new image while copying base image attributes. */ (void) CopyMagickMemory(image2->filename,image->filename,MaxTextExtent); (void) CopyMagickMemory(image2->magick_filename,image->magick_filename,MaxTextExtent); (void) CopyMagickMemory(image2->magick,image->magick,MaxTextExtent); image2->depth=image->depth; DestroyBlob(image2); image2->blob=ReferenceBlob(image->blob); if ((image->rows == 0) || (image->columns == 0)) DeleteImageFromList(&image); AppendImageToList(&image,image2); FINISH_UNL: (void) RelinquishUniqueFileResource(postscript_file); FINISH: DestroyImageInfo(clone_info); return(image); }
{ "deleted": [ { "line_no": 52, "char_start": 1318, "char_end": 1396, "line": " (void) CopyMagickMemory(clone_info->magick,magic_info->name,MaxTextExtent);\n" } ], "added": [ { "line_no": 52, "char_start": 1318, "char_end": 1387, "line": " (void) strncpy(clone_info->magick,magic_info->name,MaxTextExtent);\n" } ] }
{ "deleted": [ { "char_start": 1327, "char_end": 1329, "chars": "Co" }, { "char_start": 1330, "char_end": 1342, "chars": "yMagickMemor" } ], "added": [ { "char_start": 1327, "char_end": 1329, "chars": "st" }, { "char_start": 1330, "char_end": 1333, "chars": "ncp" } ] }
github.com/ImageMagick/ImageMagick/commit/a251039393f423c7858e63cab6aa98d17b8b7a41
coders/wpg.c
cwe-125
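The wpg.c change replaces a fixed-length CopyMagickMemory of MaxTextExtent bytes with strncpy: the source, magic_info->name, is a short NUL-terminated string, so copying a full MaxTextExtent-sized block reads far past it. A sketch of the difference, with FMT_NAME_MAX standing in for MaxTextExtent:

#include <string.h>

#define FMT_NAME_MAX 4096                    /* stand-in for MaxTextExtent */

static void set_format_name(char dst[FMT_NAME_MAX], const char *name)
{
    /* memcpy(dst, name, FMT_NAME_MAX) would read FMT_NAME_MAX bytes from a
     * source that may only hold a few characters plus its terminator */
    strncpy(dst, name, FMT_NAME_MAX - 1);    /* stops at the source NUL */
    dst[FMT_NAME_MAX - 1] = '\0';            /* strncpy does not always terminate */
}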
usb_get_bos_descriptor
int usb_get_bos_descriptor(struct usb_device *dev) { struct device *ddev = &dev->dev; struct usb_bos_descriptor *bos; struct usb_dev_cap_header *cap; unsigned char *buffer; int length, total_len, num, i; int ret; bos = kzalloc(sizeof(struct usb_bos_descriptor), GFP_KERNEL); if (!bos) return -ENOMEM; /* Get BOS descriptor */ ret = usb_get_descriptor(dev, USB_DT_BOS, 0, bos, USB_DT_BOS_SIZE); if (ret < USB_DT_BOS_SIZE) { dev_err(ddev, "unable to get BOS descriptor\n"); if (ret >= 0) ret = -ENOMSG; kfree(bos); return ret; } length = bos->bLength; total_len = le16_to_cpu(bos->wTotalLength); num = bos->bNumDeviceCaps; kfree(bos); if (total_len < length) return -EINVAL; dev->bos = kzalloc(sizeof(struct usb_host_bos), GFP_KERNEL); if (!dev->bos) return -ENOMEM; /* Now let's get the whole BOS descriptor set */ buffer = kzalloc(total_len, GFP_KERNEL); if (!buffer) { ret = -ENOMEM; goto err; } dev->bos->desc = (struct usb_bos_descriptor *)buffer; ret = usb_get_descriptor(dev, USB_DT_BOS, 0, buffer, total_len); if (ret < total_len) { dev_err(ddev, "unable to get BOS descriptor set\n"); if (ret >= 0) ret = -ENOMSG; goto err; } total_len -= length; for (i = 0; i < num; i++) { buffer += length; cap = (struct usb_dev_cap_header *)buffer; length = cap->bLength; if (total_len < length) break; total_len -= length; if (cap->bDescriptorType != USB_DT_DEVICE_CAPABILITY) { dev_warn(ddev, "descriptor type invalid, skip\n"); continue; } switch (cap->bDevCapabilityType) { case USB_CAP_TYPE_WIRELESS_USB: /* Wireless USB cap descriptor is handled by wusb */ break; case USB_CAP_TYPE_EXT: dev->bos->ext_cap = (struct usb_ext_cap_descriptor *)buffer; break; case USB_SS_CAP_TYPE: dev->bos->ss_cap = (struct usb_ss_cap_descriptor *)buffer; break; case USB_SSP_CAP_TYPE: dev->bos->ssp_cap = (struct usb_ssp_cap_descriptor *)buffer; break; case CONTAINER_ID_TYPE: dev->bos->ss_id = (struct usb_ss_container_id_descriptor *)buffer; break; case USB_PTM_CAP_TYPE: dev->bos->ptm_cap = (struct usb_ptm_cap_descriptor *)buffer; default: break; } } return 0; err: usb_release_bos_descriptor(dev); return ret; }
int usb_get_bos_descriptor(struct usb_device *dev) { struct device *ddev = &dev->dev; struct usb_bos_descriptor *bos; struct usb_dev_cap_header *cap; unsigned char *buffer; int length, total_len, num, i; int ret; bos = kzalloc(sizeof(struct usb_bos_descriptor), GFP_KERNEL); if (!bos) return -ENOMEM; /* Get BOS descriptor */ ret = usb_get_descriptor(dev, USB_DT_BOS, 0, bos, USB_DT_BOS_SIZE); if (ret < USB_DT_BOS_SIZE) { dev_err(ddev, "unable to get BOS descriptor\n"); if (ret >= 0) ret = -ENOMSG; kfree(bos); return ret; } length = bos->bLength; total_len = le16_to_cpu(bos->wTotalLength); num = bos->bNumDeviceCaps; kfree(bos); if (total_len < length) return -EINVAL; dev->bos = kzalloc(sizeof(struct usb_host_bos), GFP_KERNEL); if (!dev->bos) return -ENOMEM; /* Now let's get the whole BOS descriptor set */ buffer = kzalloc(total_len, GFP_KERNEL); if (!buffer) { ret = -ENOMEM; goto err; } dev->bos->desc = (struct usb_bos_descriptor *)buffer; ret = usb_get_descriptor(dev, USB_DT_BOS, 0, buffer, total_len); if (ret < total_len) { dev_err(ddev, "unable to get BOS descriptor set\n"); if (ret >= 0) ret = -ENOMSG; goto err; } total_len -= length; for (i = 0; i < num; i++) { buffer += length; cap = (struct usb_dev_cap_header *)buffer; if (total_len < sizeof(*cap) || total_len < cap->bLength) { dev->bos->desc->bNumDeviceCaps = i; break; } length = cap->bLength; total_len -= length; if (cap->bDescriptorType != USB_DT_DEVICE_CAPABILITY) { dev_warn(ddev, "descriptor type invalid, skip\n"); continue; } switch (cap->bDevCapabilityType) { case USB_CAP_TYPE_WIRELESS_USB: /* Wireless USB cap descriptor is handled by wusb */ break; case USB_CAP_TYPE_EXT: dev->bos->ext_cap = (struct usb_ext_cap_descriptor *)buffer; break; case USB_SS_CAP_TYPE: dev->bos->ss_cap = (struct usb_ss_cap_descriptor *)buffer; break; case USB_SSP_CAP_TYPE: dev->bos->ssp_cap = (struct usb_ssp_cap_descriptor *)buffer; break; case CONTAINER_ID_TYPE: dev->bos->ss_id = (struct usb_ss_container_id_descriptor *)buffer; break; case USB_PTM_CAP_TYPE: dev->bos->ptm_cap = (struct usb_ptm_cap_descriptor *)buffer; default: break; } } return 0; err: usb_release_bos_descriptor(dev); return ret; }
{ "deleted": [ { "line_no": 55, "char_start": 1313, "char_end": 1338, "line": "\t\tlength = cap->bLength;\n" }, { "line_no": 57, "char_start": 1339, "char_end": 1365, "line": "\t\tif (total_len < length)\n" } ], "added": [ { "line_no": 56, "char_start": 1314, "char_end": 1376, "line": "\t\tif (total_len < sizeof(*cap) || total_len < cap->bLength) {\n" }, { "line_no": 57, "char_start": 1376, "char_end": 1415, "line": "\t\t\tdev->bos->desc->bNumDeviceCaps = i;\n" }, { "line_no": 59, "char_start": 1425, "char_end": 1429, "line": "\t\t}\n" }, { "line_no": 60, "char_start": 1429, "char_end": 1454, "line": "\t\tlength = cap->bLength;\n" } ] }
{ "deleted": [ { "char_start": 1318, "char_end": 1321, "chars": "gth" }, { "char_start": 1322, "char_end": 1323, "chars": "=" }, { "char_start": 1327, "char_end": 1343, "chars": "->bLength;\n\n\t\tif" }, { "char_start": 1344, "char_end": 1345, "chars": "(" }, { "char_start": 1357, "char_end": 1358, "chars": "l" } ], "added": [ { "char_start": 1313, "char_end": 1314, "chars": "\n" }, { "char_start": 1316, "char_end": 1326, "chars": "if (total_" }, { "char_start": 1330, "char_end": 1331, "chars": "<" }, { "char_start": 1332, "char_end": 1340, "chars": "sizeof(*" }, { "char_start": 1343, "char_end": 1347, "chars": ") ||" }, { "char_start": 1360, "char_end": 1367, "chars": "cap->bL" }, { "char_start": 1373, "char_end": 1414, "chars": " {\n\t\t\tdev->bos->desc->bNumDeviceCaps = i;" }, { "char_start": 1423, "char_end": 1452, "chars": ";\n\t\t}\n\t\tlength = cap->bLength" } ] }
github.com/torvalds/linux/commit/1c0edc3633b56000e18d82fc241e3995ca18a69e
drivers/usb/core/config.c
cwe-125
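The USB BOS fix checks, on every loop iteration, that the bytes remaining in the descriptor set cover both the fixed capability header and the length that header claims, before bLength is trusted; on failure it also shrinks bNumDeviceCaps so later consumers do not walk past the truncated data. A sketch of the per-iteration check with hypothetical types:

#include <stddef.h>
#include <stdint.h>

struct cap_header { uint8_t bLength; uint8_t bDescriptorType; };

static const struct cap_header *next_cap(const uint8_t *buf, size_t remaining)
{
    const struct cap_header *cap = (const struct cap_header *)buf;

    if (remaining < sizeof(*cap) || remaining < cap->bLength)
        return NULL;               /* truncated descriptor set */
    return cap;
}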
Mat_VarReadNextInfo4
Mat_VarReadNextInfo4(mat_t *mat) { int M,O,data_type,class_type; mat_int32_t tmp; long nBytes; size_t readresult; matvar_t *matvar = NULL; union { mat_uint32_t u; mat_uint8_t c[4]; } endian; if ( mat == NULL || mat->fp == NULL ) return NULL; else if ( NULL == (matvar = Mat_VarCalloc()) ) return NULL; readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } endian.u = 0x01020304; /* See if MOPT may need byteswapping */ if ( tmp < 0 || tmp > 4052 ) { if ( Mat_int32Swap(&tmp) > 4052 ) { Mat_VarFree(matvar); return NULL; } } M = (int)floor(tmp / 1000.0); switch ( M ) { case 0: /* IEEE little endian */ mat->byteswap = endian.c[0] != 4; break; case 1: /* IEEE big endian */ mat->byteswap = endian.c[0] != 1; break; default: /* VAX, Cray, or bogus */ Mat_VarFree(matvar); return NULL; } tmp -= M*1000; O = (int)floor(tmp / 100.0); /* O must be zero */ if ( 0 != O ) { Mat_VarFree(matvar); return NULL; } tmp -= O*100; data_type = (int)floor(tmp / 10.0); /* Convert the V4 data type */ switch ( data_type ) { case 0: matvar->data_type = MAT_T_DOUBLE; break; case 1: matvar->data_type = MAT_T_SINGLE; break; case 2: matvar->data_type = MAT_T_INT32; break; case 3: matvar->data_type = MAT_T_INT16; break; case 4: matvar->data_type = MAT_T_UINT16; break; case 5: matvar->data_type = MAT_T_UINT8; break; default: Mat_VarFree(matvar); return NULL; } tmp -= data_type*10; class_type = (int)floor(tmp / 1.0); switch ( class_type ) { case 0: matvar->class_type = MAT_C_DOUBLE; break; case 1: matvar->class_type = MAT_C_CHAR; break; case 2: matvar->class_type = MAT_C_SPARSE; break; default: Mat_VarFree(matvar); return NULL; } matvar->rank = 2; matvar->dims = (size_t*)calloc(2, sizeof(*matvar->dims)); if ( NULL == matvar->dims ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( mat->byteswap ) Mat_int32Swap(&tmp); matvar->dims[0] = tmp; if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( mat->byteswap ) Mat_int32Swap(&tmp); matvar->dims[1] = tmp; if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&(matvar->isComplex),sizeof(int),1,(FILE*)mat->fp); if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } if ( matvar->isComplex && MAT_C_CHAR == matvar->class_type ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } if ( mat->byteswap ) Mat_int32Swap(&tmp); /* Check that the length of the variable name is at least 1 */ if ( tmp < 1 ) { Mat_VarFree(matvar); return NULL; } matvar->name = (char*)malloc(tmp); if ( NULL == matvar->name ) { Mat_VarFree(matvar); return NULL; } readresult = fread(matvar->name,1,tmp,(FILE*)mat->fp); if ( tmp != readresult ) { Mat_VarFree(matvar); return NULL; } matvar->internal->datapos = ftell((FILE*)mat->fp); if ( matvar->internal->datapos == -1L ) { Mat_VarFree(matvar); Mat_Critical("Couldn't determine file position"); return NULL; } { int err; size_t tmp2 = Mat_SizeOf(matvar->data_type); if ( matvar->isComplex ) tmp2 *= 2; err = SafeMulDims(matvar, &tmp2); if ( err ) { Mat_VarFree(matvar); Mat_Critical("Integer multiplication overflow"); return NULL; } nBytes = (long)tmp2; } (void)fseek((FILE*)mat->fp,nBytes,SEEK_CUR); return matvar; }
Mat_VarReadNextInfo4(mat_t *mat) { int M,O,data_type,class_type; mat_int32_t tmp; long nBytes; size_t readresult; matvar_t *matvar = NULL; union { mat_uint32_t u; mat_uint8_t c[4]; } endian; if ( mat == NULL || mat->fp == NULL ) return NULL; else if ( NULL == (matvar = Mat_VarCalloc()) ) return NULL; readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } endian.u = 0x01020304; /* See if MOPT may need byteswapping */ if ( tmp < 0 || tmp > 4052 ) { if ( Mat_int32Swap(&tmp) > 4052 ) { Mat_VarFree(matvar); return NULL; } } M = (int)floor(tmp / 1000.0); switch ( M ) { case 0: /* IEEE little endian */ mat->byteswap = endian.c[0] != 4; break; case 1: /* IEEE big endian */ mat->byteswap = endian.c[0] != 1; break; default: /* VAX, Cray, or bogus */ Mat_VarFree(matvar); return NULL; } tmp -= M*1000; O = (int)floor(tmp / 100.0); /* O must be zero */ if ( 0 != O ) { Mat_VarFree(matvar); return NULL; } tmp -= O*100; data_type = (int)floor(tmp / 10.0); /* Convert the V4 data type */ switch ( data_type ) { case 0: matvar->data_type = MAT_T_DOUBLE; break; case 1: matvar->data_type = MAT_T_SINGLE; break; case 2: matvar->data_type = MAT_T_INT32; break; case 3: matvar->data_type = MAT_T_INT16; break; case 4: matvar->data_type = MAT_T_UINT16; break; case 5: matvar->data_type = MAT_T_UINT8; break; default: Mat_VarFree(matvar); return NULL; } tmp -= data_type*10; class_type = (int)floor(tmp / 1.0); switch ( class_type ) { case 0: matvar->class_type = MAT_C_DOUBLE; break; case 1: matvar->class_type = MAT_C_CHAR; break; case 2: matvar->class_type = MAT_C_SPARSE; break; default: Mat_VarFree(matvar); return NULL; } matvar->rank = 2; matvar->dims = (size_t*)calloc(2, sizeof(*matvar->dims)); if ( NULL == matvar->dims ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( mat->byteswap ) Mat_int32Swap(&tmp); matvar->dims[0] = tmp; if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( mat->byteswap ) Mat_int32Swap(&tmp); matvar->dims[1] = tmp; if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&(matvar->isComplex),sizeof(int),1,(FILE*)mat->fp); if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } if ( matvar->isComplex && MAT_C_CHAR == matvar->class_type ) { Mat_VarFree(matvar); return NULL; } readresult = fread(&tmp,sizeof(int),1,(FILE*)mat->fp); if ( 1 != readresult ) { Mat_VarFree(matvar); return NULL; } if ( mat->byteswap ) Mat_int32Swap(&tmp); /* Check that the length of the variable name is at least 1 */ if ( tmp < 1 ) { Mat_VarFree(matvar); return NULL; } matvar->name = (char*)malloc(tmp); if ( NULL == matvar->name ) { Mat_VarFree(matvar); return NULL; } readresult = fread(matvar->name,1,tmp,(FILE*)mat->fp); if ( tmp != readresult ) { Mat_VarFree(matvar); return NULL; } else { matvar->name[tmp - 1] = '\0'; } matvar->internal->datapos = ftell((FILE*)mat->fp); if ( matvar->internal->datapos == -1L ) { Mat_VarFree(matvar); Mat_Critical("Couldn't determine file position"); return NULL; } { int err; size_t tmp2 = Mat_SizeOf(matvar->data_type); if ( matvar->isComplex ) tmp2 *= 2; err = SafeMulDims(matvar, &tmp2); if ( err ) { Mat_VarFree(matvar); Mat_Critical("Integer multiplication overflow"); return NULL; } nBytes = (long)tmp2; } (void)fseek((FILE*)mat->fp,nBytes,SEEK_CUR); return matvar; }
{ "deleted": [], "added": [ { "line_no": 155, "char_start": 3941, "char_end": 3954, "line": " } else {\n" }, { "line_no": 156, "char_start": 3954, "char_end": 3992, "line": " matvar->name[tmp - 1] = '\\0';\n" } ] }
{ "deleted": [], "added": [ { "char_start": 3946, "char_end": 3997, "chars": " else {\n matvar->name[tmp - 1] = '\\0';\n }" } ] }
github.com/tbeu/matio/commit/651a8e28099edb5fbb9e4e1d4d3238848f446c9a
src/mat4.c
cwe-125
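The only functional change in this record is the added else branch that writes '\0' into the last byte of matvar->name once the fread succeeds. A small stand-alone sketch of the same read-then-terminate pattern, with generic names rather than the matio API:

#include <stdio.h>
#include <stdlib.h>

/* Read a length-prefixed name of `len` bytes from `fp` and guarantee the result
   is NUL-terminated, so later string handling cannot read past the allocation. */
static char *read_name(FILE *fp, size_t len)
{
	char *name;

	if (len < 1)
		return NULL;
	name = (char *)malloc(len);
	if (name == NULL)
		return NULL;
	if (fread(name, 1, len, fp) != len) {
		free(name);
		return NULL;
	}
	name[len - 1] = '\0';   /* force termination, mirroring the patch */
	return name;
}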
getToken
static x86newTokenType getToken(const char *str, size_t *begin, size_t *end) { // Skip whitespace while (begin && isspace ((ut8)str[*begin])) { ++(*begin); } if (!str[*begin]) { // null byte *end = *begin; return TT_EOF; } else if (isalpha ((ut8)str[*begin])) { // word token *end = *begin; while (end && isalnum ((ut8)str[*end])) { ++(*end); } return TT_WORD; } else if (isdigit ((ut8)str[*begin])) { // number token *end = *begin; while (end && isalnum ((ut8)str[*end])) { // accept alphanumeric characters, because hex. ++(*end); } return TT_NUMBER; } else { // special character: [, ], +, *, ... *end = *begin + 1; return TT_SPECIAL; } }
static x86newTokenType getToken(const char *str, size_t *begin, size_t *end) { if (*begin > strlen (str)) { return TT_EOF; } // Skip whitespace while (begin && str[*begin] && isspace ((ut8)str[*begin])) { ++(*begin); } if (!str[*begin]) { // null byte *end = *begin; return TT_EOF; } if (isalpha ((ut8)str[*begin])) { // word token *end = *begin; while (end && str[*end] && isalnum ((ut8)str[*end])) { ++(*end); } return TT_WORD; } if (isdigit ((ut8)str[*begin])) { // number token *end = *begin; while (end && isalnum ((ut8)str[*end])) { // accept alphanumeric characters, because hex. ++(*end); } return TT_NUMBER; } else { // special character: [, ], +, *, ... *end = *begin + 1; return TT_SPECIAL; } }
{ "deleted": [ { "line_no": 3, "char_start": 99, "char_end": 146, "line": "\twhile (begin && isspace ((ut8)str[*begin])) {\n" }, { "line_no": 10, "char_start": 247, "char_end": 305, "line": "\t} else if (isalpha ((ut8)str[*begin])) { // word token\n" }, { "line_no": 12, "char_start": 322, "char_end": 366, "line": "\t\twhile (end && isalnum ((ut8)str[*end])) {\n" }, { "line_no": 16, "char_start": 401, "char_end": 461, "line": "\t} else if (isdigit ((ut8)str[*begin])) { // number token\n" } ], "added": [ { "line_no": 2, "char_start": 79, "char_end": 109, "line": "\tif (*begin > strlen (str)) {\n" }, { "line_no": 3, "char_start": 109, "char_end": 126, "line": "\t\treturn TT_EOF;\n" }, { "line_no": 4, "char_start": 126, "char_end": 129, "line": "\t}\n" }, { "line_no": 6, "char_start": 149, "char_end": 211, "line": "\twhile (begin && str[*begin] && isspace ((ut8)str[*begin])) {\n" }, { "line_no": 13, "char_start": 312, "char_end": 315, "line": "\t}\n" }, { "line_no": 14, "char_start": 315, "char_end": 366, "line": "\tif (isalpha ((ut8)str[*begin])) { // word token\n" }, { "line_no": 16, "char_start": 383, "char_end": 440, "line": "\t\twhile (end && str[*end] && isalnum ((ut8)str[*end])) {\n" }, { "line_no": 20, "char_start": 475, "char_end": 478, "line": "\t}\n" }, { "line_no": 21, "char_start": 478, "char_end": 531, "line": "\tif (isdigit ((ut8)str[*begin])) { // number token\n" } ] }
{ "deleted": [ { "char_start": 249, "char_end": 255, "chars": " else " }, { "char_start": 403, "char_end": 409, "chars": " else " } ], "added": [ { "char_start": 80, "char_end": 130, "chars": "if (*begin > strlen (str)) {\n\t\treturn TT_EOF;\n\t}\n\t" }, { "char_start": 162, "char_end": 177, "chars": " && str[*begin]" }, { "char_start": 314, "char_end": 316, "chars": "\n\t" }, { "char_start": 395, "char_end": 408, "chars": " && str[*end]" }, { "char_start": 477, "char_end": 479, "chars": "\n\t" } ] }
github.com/radare/radare2/commit/66191f780863ea8c66ace4040d0d04a8842e8432
libr/asm/p/asm_x86_nz.c
cwe-125
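This patch adds three guards to the tokenizer: an up-front *begin > strlen(str) rejection, and a str[*begin] / str[*end] test inside each scanning loop so the cursor always stops at the terminator. A stand-alone sketch of that scanning discipline (hypothetical helper, not the radare2 parser):

#include <ctype.h>
#include <stddef.h>
#include <string.h>

/* Skip whitespace and then scan one alphanumeric word, never indexing past the
   terminating NUL. Returns 0 on success, -1 if `begin` is out of range. */
static int scan_word(const char *str, size_t begin, size_t *start, size_t *end)
{
	if (begin > strlen (str))
		return -1;                       /* start index already past the buffer */
	while (str[begin] && isspace ((unsigned char)str[begin]))
		begin++;
	*start = begin;
	*end = begin;
	while (str[*end] && isalnum ((unsigned char)str[*end]))
		(*end)++;                        /* the str[*end] test stops at '\0' */
	return 0;
}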
fiber_switch
fiber_switch(mrb_state *mrb, mrb_value self, mrb_int len, const mrb_value *a, mrb_bool resume, mrb_bool vmexec) { struct mrb_context *c = fiber_check(mrb, self); struct mrb_context *old_c = mrb->c; mrb_value value; fiber_check_cfunc(mrb, c); if (resume && c->status == MRB_FIBER_TRANSFERRED) { mrb_raise(mrb, E_FIBER_ERROR, "resuming transferred fiber"); } if (c->status == MRB_FIBER_RUNNING || c->status == MRB_FIBER_RESUMED) { mrb_raise(mrb, E_FIBER_ERROR, "double resume (fib)"); } if (c->status == MRB_FIBER_TERMINATED) { mrb_raise(mrb, E_FIBER_ERROR, "resuming dead fiber"); } mrb->c->status = resume ? MRB_FIBER_RESUMED : MRB_FIBER_TRANSFERRED; c->prev = resume ? mrb->c : (c->prev ? c->prev : mrb->root_c); if (c->status == MRB_FIBER_CREATED) { mrb_value *b, *e; if (len >= c->stend - c->stack) { mrb_raise(mrb, E_FIBER_ERROR, "too many arguments to fiber"); } b = c->stack+1; e = b + len; while (b<e) { *b++ = *a++; } c->cibase->argc = (int)len; value = c->stack[0] = MRB_PROC_ENV(c->ci->proc)->stack[0]; } else { value = fiber_result(mrb, a, len); } fiber_switch_context(mrb, c); if (vmexec) { c->vmexec = TRUE; value = mrb_vm_exec(mrb, c->ci[-1].proc, c->ci->pc); mrb->c = old_c; } else { MARK_CONTEXT_MODIFY(c); } return value; }
fiber_switch(mrb_state *mrb, mrb_value self, mrb_int len, const mrb_value *a, mrb_bool resume, mrb_bool vmexec) { struct mrb_context *c = fiber_check(mrb, self); struct mrb_context *old_c = mrb->c; enum mrb_fiber_state status; mrb_value value; fiber_check_cfunc(mrb, c); status = c->status; if (resume && status == MRB_FIBER_TRANSFERRED) { mrb_raise(mrb, E_FIBER_ERROR, "resuming transferred fiber"); } if (status == MRB_FIBER_RUNNING || status == MRB_FIBER_RESUMED) { mrb_raise(mrb, E_FIBER_ERROR, "double resume (fib)"); } if (status == MRB_FIBER_TERMINATED) { mrb_raise(mrb, E_FIBER_ERROR, "resuming dead fiber"); } old_c->status = resume ? MRB_FIBER_RESUMED : MRB_FIBER_TRANSFERRED; c->prev = resume ? mrb->c : (c->prev ? c->prev : mrb->root_c); fiber_switch_context(mrb, c); if (status == MRB_FIBER_CREATED) { mrb_value *b, *e; mrb_stack_extend(mrb, len+2); /* for receiver and (optional) block */ b = c->stack+1; e = b + len; while (b<e) { *b++ = *a++; } c->cibase->argc = (int)len; value = c->stack[0] = MRB_PROC_ENV(c->ci->proc)->stack[0]; } else { value = fiber_result(mrb, a, len); } if (vmexec) { c->vmexec = TRUE; value = mrb_vm_exec(mrb, c->ci[-1].proc, c->ci->pc); mrb->c = old_c; } else { MARK_CONTEXT_MODIFY(c); } return value; }
{ "deleted": [ { "line_no": 8, "char_start": 251, "char_end": 305, "line": " if (resume && c->status == MRB_FIBER_TRANSFERRED) {\n" }, { "line_no": 11, "char_start": 374, "char_end": 448, "line": " if (c->status == MRB_FIBER_RUNNING || c->status == MRB_FIBER_RESUMED) {\n" }, { "line_no": 14, "char_start": 510, "char_end": 553, "line": " if (c->status == MRB_FIBER_TERMINATED) {\n" }, { "line_no": 17, "char_start": 615, "char_end": 686, "line": " mrb->c->status = resume ? MRB_FIBER_RESUMED : MRB_FIBER_TRANSFERRED;\n" }, { "line_no": 19, "char_start": 751, "char_end": 791, "line": " if (c->status == MRB_FIBER_CREATED) {\n" }, { "line_no": 22, "char_start": 814, "char_end": 852, "line": " if (len >= c->stend - c->stack) {\n" }, { "line_no": 23, "char_start": 852, "char_end": 920, "line": " mrb_raise(mrb, E_FIBER_ERROR, \"too many arguments to fiber\");\n" }, { "line_no": 24, "char_start": 920, "char_end": 926, "line": " }\n" }, { "line_no": 36, "char_start": 1157, "char_end": 1189, "line": " fiber_switch_context(mrb, c);\n" } ], "added": [ { "line_no": 5, "char_start": 202, "char_end": 233, "line": " enum mrb_fiber_state status;\n" }, { "line_no": 9, "char_start": 282, "char_end": 304, "line": " status = c->status;\n" }, { "line_no": 10, "char_start": 304, "char_end": 355, "line": " if (resume && status == MRB_FIBER_TRANSFERRED) {\n" }, { "line_no": 13, "char_start": 424, "char_end": 492, "line": " if (status == MRB_FIBER_RUNNING || status == MRB_FIBER_RESUMED) {\n" }, { "line_no": 16, "char_start": 554, "char_end": 594, "line": " if (status == MRB_FIBER_TERMINATED) {\n" }, { "line_no": 19, "char_start": 656, "char_end": 726, "line": " old_c->status = resume ? MRB_FIBER_RESUMED : MRB_FIBER_TRANSFERRED;\n" }, { "line_no": 21, "char_start": 791, "char_end": 823, "line": " fiber_switch_context(mrb, c);\n" }, { "line_no": 22, "char_start": 823, "char_end": 860, "line": " if (status == MRB_FIBER_CREATED) {\n" }, { "line_no": 25, "char_start": 883, "char_end": 957, "line": " mrb_stack_extend(mrb, len+2); /* for receiver and (optional) block */\n" } ] }
{ "deleted": [ { "char_start": 267, "char_end": 270, "chars": "c->" }, { "char_start": 380, "char_end": 383, "chars": "c->" }, { "char_start": 414, "char_end": 417, "chars": "c->" }, { "char_start": 516, "char_end": 519, "chars": "c->" }, { "char_start": 617, "char_end": 622, "chars": "mrb->" }, { "char_start": 757, "char_end": 760, "chars": "c->" }, { "char_start": 818, "char_end": 843, "chars": "if (len >= c->stend - c->" }, { "char_start": 848, "char_end": 861, "chars": ") {\n mrb" }, { "char_start": 862, "char_end": 866, "chars": "rais" }, { "char_start": 873, "char_end": 887, "chars": "E_FIBER_ERROR," }, { "char_start": 888, "char_end": 891, "chars": "\"to" }, { "char_start": 893, "char_end": 897, "chars": "many" }, { "char_start": 899, "char_end": 904, "chars": "rgume" }, { "char_start": 905, "char_end": 907, "chars": "ts" }, { "char_start": 908, "char_end": 909, "chars": "t" }, { "char_start": 910, "char_end": 912, "chars": " f" }, { "char_start": 913, "char_end": 917, "chars": "ber\"" }, { "char_start": 918, "char_end": 920, "chars": ";\n" }, { "char_start": 921, "char_end": 923, "chars": " " }, { "char_start": 924, "char_end": 925, "chars": "}" }, { "char_start": 1156, "char_end": 1188, "chars": "\n fiber_switch_context(mrb, c);" } ], "added": [ { "char_start": 204, "char_end": 235, "chars": "enum mrb_fiber_state status;\n " }, { "char_start": 284, "char_end": 306, "chars": "status = c->status;\n " }, { "char_start": 658, "char_end": 662, "chars": "old_" }, { "char_start": 793, "char_end": 825, "chars": "fiber_switch_context(mrb, c);\n " }, { "char_start": 887, "char_end": 891, "chars": "mrb_" }, { "char_start": 898, "char_end": 903, "chars": "xtend" }, { "char_start": 909, "char_end": 910, "chars": "l" }, { "char_start": 912, "char_end": 916, "chars": "+2);" }, { "char_start": 917, "char_end": 919, "chars": "/*" }, { "char_start": 921, "char_end": 928, "chars": "or rece" }, { "char_start": 929, "char_end": 930, "chars": "v" }, { "char_start": 933, "char_end": 936, "chars": "and" }, { "char_start": 937, "char_end": 947, "chars": "(optional)" }, { "char_start": 948, "char_end": 953, "chars": "block" }, { "char_start": 954, "char_end": 956, "chars": "*/" } ] }
github.com/mruby/mruby/commit/778500563a9f7ceba996937dc886bd8cde29b42b
mrbgems/mruby-fiber/src/fiber.c
cwe-125
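Besides caching c->status and performing fiber_switch_context() before the argument copy, the patch replaces the old `len >= c->stend - c->stack` error path with mrb_stack_extend(mrb, len+2), growing the target stack to fit the arguments plus receiver and optional block. A generic grow-before-copy sketch in plain C (invented types, not the mruby VM API):

#include <stdlib.h>
#include <string.h>

struct arg_stack {
	long  *base;
	size_t cap;     /* number of slots currently allocated */
};

/* Ensure the stack can hold `len` arguments plus two extra slots (receiver and
   optional block), growing it if necessary, then copy the arguments in. */
static int push_args(struct arg_stack *s, const long *args, size_t len)
{
	size_t need = len + 2;

	if (need > s->cap) {
		long *p = (long *)realloc(s->base, need * sizeof *p);
		if (p == NULL)
			return -1;
		s->base = p;
		s->cap = need;
	}
	memcpy(s->base + 1, args, len * sizeof *args);   /* slot 0 is left for the receiver */
	return 0;
}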
start_input_ppm
start_input_ppm(j_compress_ptr cinfo, cjpeg_source_ptr sinfo) { ppm_source_ptr source = (ppm_source_ptr)sinfo; int c; unsigned int w, h, maxval; boolean need_iobuffer, use_raw_buffer, need_rescale; if (getc(source->pub.input_file) != 'P') ERREXIT(cinfo, JERR_PPM_NOT); c = getc(source->pub.input_file); /* subformat discriminator character */ /* detect unsupported variants (ie, PBM) before trying to read header */ switch (c) { case '2': /* it's a text-format PGM file */ case '3': /* it's a text-format PPM file */ case '5': /* it's a raw-format PGM file */ case '6': /* it's a raw-format PPM file */ break; default: ERREXIT(cinfo, JERR_PPM_NOT); break; } /* fetch the remaining header info */ w = read_pbm_integer(cinfo, source->pub.input_file, 65535); h = read_pbm_integer(cinfo, source->pub.input_file, 65535); maxval = read_pbm_integer(cinfo, source->pub.input_file, 65535); if (w <= 0 || h <= 0 || maxval <= 0) /* error check */ ERREXIT(cinfo, JERR_PPM_NOT); cinfo->data_precision = BITS_IN_JSAMPLE; /* we always rescale data to this */ cinfo->image_width = (JDIMENSION)w; cinfo->image_height = (JDIMENSION)h; source->maxval = maxval; /* initialize flags to most common settings */ need_iobuffer = TRUE; /* do we need an I/O buffer? */ use_raw_buffer = FALSE; /* do we map input buffer onto I/O buffer? */ need_rescale = TRUE; /* do we need a rescale array? */ switch (c) { case '2': /* it's a text-format PGM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_GRAYSCALE; TRACEMS2(cinfo, 1, JTRC_PGM_TEXT, w, h); if (cinfo->in_color_space == JCS_GRAYSCALE) source->pub.get_pixel_rows = get_text_gray_row; else if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_text_gray_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_text_gray_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); need_iobuffer = FALSE; break; case '3': /* it's a text-format PPM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_EXT_RGB; TRACEMS2(cinfo, 1, JTRC_PPM_TEXT, w, h); if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_text_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_text_rgb_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); need_iobuffer = FALSE; break; case '5': /* it's a raw-format PGM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_GRAYSCALE; TRACEMS2(cinfo, 1, JTRC_PGM, w, h); if (maxval > 255) { source->pub.get_pixel_rows = get_word_gray_row; } else if (maxval == MAXJSAMPLE && sizeof(JSAMPLE) == sizeof(U_CHAR) && cinfo->in_color_space == JCS_GRAYSCALE) { source->pub.get_pixel_rows = get_raw_row; use_raw_buffer = TRUE; need_rescale = FALSE; } else { if (cinfo->in_color_space == JCS_GRAYSCALE) source->pub.get_pixel_rows = get_scaled_gray_row; else if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_gray_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_gray_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); } break; case '6': /* it's a raw-format PPM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_EXT_RGB; TRACEMS2(cinfo, 1, JTRC_PPM, w, h); if (maxval > 255) { source->pub.get_pixel_rows = get_word_rgb_row; } else if (maxval == MAXJSAMPLE && sizeof(JSAMPLE) == sizeof(U_CHAR) && (cinfo->in_color_space == JCS_EXT_RGB #if RGB_RED == 0 && RGB_GREEN == 1 && RGB_BLUE == 2 && RGB_PIXELSIZE == 3 || cinfo->in_color_space == JCS_RGB #endif )) { 
source->pub.get_pixel_rows = get_raw_row; use_raw_buffer = TRUE; need_rescale = FALSE; } else { if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_rgb_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); } break; } if (IsExtRGB(cinfo->in_color_space)) cinfo->input_components = rgb_pixelsize[cinfo->in_color_space]; else if (cinfo->in_color_space == JCS_GRAYSCALE) cinfo->input_components = 1; else if (cinfo->in_color_space == JCS_CMYK) cinfo->input_components = 4; /* Allocate space for I/O buffer: 1 or 3 bytes or words/pixel. */ if (need_iobuffer) { if (c == '6') source->buffer_width = (size_t)w * 3 * ((maxval <= 255) ? sizeof(U_CHAR) : (2 * sizeof(U_CHAR))); else source->buffer_width = (size_t)w * ((maxval <= 255) ? sizeof(U_CHAR) : (2 * sizeof(U_CHAR))); source->iobuffer = (U_CHAR *) (*cinfo->mem->alloc_small) ((j_common_ptr)cinfo, JPOOL_IMAGE, source->buffer_width); } /* Create compressor input buffer. */ if (use_raw_buffer) { /* For unscaled raw-input case, we can just map it onto the I/O buffer. */ /* Synthesize a JSAMPARRAY pointer structure */ source->pixrow = (JSAMPROW)source->iobuffer; source->pub.buffer = &source->pixrow; source->pub.buffer_height = 1; } else { /* Need to translate anyway, so make a separate sample buffer. */ source->pub.buffer = (*cinfo->mem->alloc_sarray) ((j_common_ptr)cinfo, JPOOL_IMAGE, (JDIMENSION)w * cinfo->input_components, (JDIMENSION)1); source->pub.buffer_height = 1; } /* Compute the rescaling array if required. */ if (need_rescale) { long val, half_maxval; /* On 16-bit-int machines we have to be careful of maxval = 65535 */ source->rescale = (JSAMPLE *) (*cinfo->mem->alloc_small) ((j_common_ptr)cinfo, JPOOL_IMAGE, (size_t)(((long)maxval + 1L) * sizeof(JSAMPLE))); half_maxval = maxval / 2; for (val = 0; val <= (long)maxval; val++) { /* The multiplication here must be done in 32 bits to avoid overflow */ source->rescale[val] = (JSAMPLE)((val * MAXJSAMPLE + half_maxval) / maxval); } } }
start_input_ppm(j_compress_ptr cinfo, cjpeg_source_ptr sinfo) { ppm_source_ptr source = (ppm_source_ptr)sinfo; int c; unsigned int w, h, maxval; boolean need_iobuffer, use_raw_buffer, need_rescale; if (getc(source->pub.input_file) != 'P') ERREXIT(cinfo, JERR_PPM_NOT); c = getc(source->pub.input_file); /* subformat discriminator character */ /* detect unsupported variants (ie, PBM) before trying to read header */ switch (c) { case '2': /* it's a text-format PGM file */ case '3': /* it's a text-format PPM file */ case '5': /* it's a raw-format PGM file */ case '6': /* it's a raw-format PPM file */ break; default: ERREXIT(cinfo, JERR_PPM_NOT); break; } /* fetch the remaining header info */ w = read_pbm_integer(cinfo, source->pub.input_file, 65535); h = read_pbm_integer(cinfo, source->pub.input_file, 65535); maxval = read_pbm_integer(cinfo, source->pub.input_file, 65535); if (w <= 0 || h <= 0 || maxval <= 0) /* error check */ ERREXIT(cinfo, JERR_PPM_NOT); cinfo->data_precision = BITS_IN_JSAMPLE; /* we always rescale data to this */ cinfo->image_width = (JDIMENSION)w; cinfo->image_height = (JDIMENSION)h; source->maxval = maxval; /* initialize flags to most common settings */ need_iobuffer = TRUE; /* do we need an I/O buffer? */ use_raw_buffer = FALSE; /* do we map input buffer onto I/O buffer? */ need_rescale = TRUE; /* do we need a rescale array? */ switch (c) { case '2': /* it's a text-format PGM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_GRAYSCALE; TRACEMS2(cinfo, 1, JTRC_PGM_TEXT, w, h); if (cinfo->in_color_space == JCS_GRAYSCALE) source->pub.get_pixel_rows = get_text_gray_row; else if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_text_gray_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_text_gray_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); need_iobuffer = FALSE; break; case '3': /* it's a text-format PPM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_EXT_RGB; TRACEMS2(cinfo, 1, JTRC_PPM_TEXT, w, h); if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_text_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_text_rgb_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); need_iobuffer = FALSE; break; case '5': /* it's a raw-format PGM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_GRAYSCALE; TRACEMS2(cinfo, 1, JTRC_PGM, w, h); if (maxval > 255) { source->pub.get_pixel_rows = get_word_gray_row; } else if (maxval == MAXJSAMPLE && sizeof(JSAMPLE) == sizeof(U_CHAR) && cinfo->in_color_space == JCS_GRAYSCALE) { source->pub.get_pixel_rows = get_raw_row; use_raw_buffer = TRUE; need_rescale = FALSE; } else { if (cinfo->in_color_space == JCS_GRAYSCALE) source->pub.get_pixel_rows = get_scaled_gray_row; else if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_gray_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_gray_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); } break; case '6': /* it's a raw-format PPM file */ if (cinfo->in_color_space == JCS_UNKNOWN) cinfo->in_color_space = JCS_EXT_RGB; TRACEMS2(cinfo, 1, JTRC_PPM, w, h); if (maxval > 255) { source->pub.get_pixel_rows = get_word_rgb_row; } else if (maxval == MAXJSAMPLE && sizeof(JSAMPLE) == sizeof(U_CHAR) && (cinfo->in_color_space == JCS_EXT_RGB #if RGB_RED == 0 && RGB_GREEN == 1 && RGB_BLUE == 2 && RGB_PIXELSIZE == 3 || cinfo->in_color_space == JCS_RGB #endif )) { 
source->pub.get_pixel_rows = get_raw_row; use_raw_buffer = TRUE; need_rescale = FALSE; } else { if (IsExtRGB(cinfo->in_color_space)) source->pub.get_pixel_rows = get_rgb_row; else if (cinfo->in_color_space == JCS_CMYK) source->pub.get_pixel_rows = get_rgb_cmyk_row; else ERREXIT(cinfo, JERR_BAD_IN_COLORSPACE); } break; } if (IsExtRGB(cinfo->in_color_space)) cinfo->input_components = rgb_pixelsize[cinfo->in_color_space]; else if (cinfo->in_color_space == JCS_GRAYSCALE) cinfo->input_components = 1; else if (cinfo->in_color_space == JCS_CMYK) cinfo->input_components = 4; /* Allocate space for I/O buffer: 1 or 3 bytes or words/pixel. */ if (need_iobuffer) { if (c == '6') source->buffer_width = (size_t)w * 3 * ((maxval <= 255) ? sizeof(U_CHAR) : (2 * sizeof(U_CHAR))); else source->buffer_width = (size_t)w * ((maxval <= 255) ? sizeof(U_CHAR) : (2 * sizeof(U_CHAR))); source->iobuffer = (U_CHAR *) (*cinfo->mem->alloc_small) ((j_common_ptr)cinfo, JPOOL_IMAGE, source->buffer_width); } /* Create compressor input buffer. */ if (use_raw_buffer) { /* For unscaled raw-input case, we can just map it onto the I/O buffer. */ /* Synthesize a JSAMPARRAY pointer structure */ source->pixrow = (JSAMPROW)source->iobuffer; source->pub.buffer = &source->pixrow; source->pub.buffer_height = 1; } else { /* Need to translate anyway, so make a separate sample buffer. */ source->pub.buffer = (*cinfo->mem->alloc_sarray) ((j_common_ptr)cinfo, JPOOL_IMAGE, (JDIMENSION)w * cinfo->input_components, (JDIMENSION)1); source->pub.buffer_height = 1; } /* Compute the rescaling array if required. */ if (need_rescale) { long val, half_maxval; /* On 16-bit-int machines we have to be careful of maxval = 65535 */ source->rescale = (JSAMPLE *) (*cinfo->mem->alloc_small) ((j_common_ptr)cinfo, JPOOL_IMAGE, (size_t)(((long)MAX(maxval, 255) + 1L) * sizeof(JSAMPLE))); half_maxval = maxval / 2; for (val = 0; val <= (long)maxval; val++) { /* The multiplication here must be done in 32 bits to avoid overflow */ source->rescale[val] = (JSAMPLE)((val * MAXJSAMPLE + half_maxval) / maxval); } } }
{ "deleted": [ { "line_no": 163, "char_start": 6187, "char_end": 6252, "line": " (size_t)(((long)maxval + 1L) *\n" } ], "added": [ { "line_no": 163, "char_start": 6187, "char_end": 6262, "line": " (size_t)(((long)MAX(maxval, 255) + 1L) *\n" } ] }
{ "deleted": [], "added": [ { "char_start": 6237, "char_end": 6241, "chars": "MAX(" }, { "char_start": 6247, "char_end": 6253, "chars": ", 255)" } ] }
github.com/libjpeg-turbo/libjpeg-turbo/commit/3de15e0c344d11d4b90f4a47136467053eb2d09a
rdppm.c
cwe-125
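The single change in this record sizes the rescale table with MAX(maxval, 255) + 1 entries instead of maxval + 1, presumably because the table is later indexed with raw sample bytes, which can reach 255 even when the header's maxval is smaller. A self-contained sketch of that sizing rule in plain C (not the libjpeg-turbo memory manager):

#include <stdlib.h>

#define MAX(a, b)  ((a) > (b) ? (a) : (b))

/* Build a value-rescaling table that is safe to index with any 8-bit sample,
   even if the declared maxval is below 255. Returns NULL on error. */
static unsigned char *make_rescale(unsigned int maxval)
{
	size_t entries = (size_t)MAX(maxval, 255) + 1;
	unsigned char *rescale;
	unsigned int val;

	if (maxval == 0)
		return NULL;
	rescale = (unsigned char *)calloc(entries, sizeof *rescale);
	if (rescale == NULL)
		return NULL;
	for (val = 0; val <= maxval; val++)
		rescale[val] = (unsigned char)((val * 255UL + maxval / 2) / maxval);
	return rescale;   /* any slots above maxval stay zero but are in bounds */
}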
snd_usb_create_streams
static int snd_usb_create_streams(struct snd_usb_audio *chip, int ctrlif) { struct usb_device *dev = chip->dev; struct usb_host_interface *host_iface; struct usb_interface_descriptor *altsd; void *control_header; int i, protocol; /* find audiocontrol interface */ host_iface = &usb_ifnum_to_if(dev, ctrlif)->altsetting[0]; control_header = snd_usb_find_csint_desc(host_iface->extra, host_iface->extralen, NULL, UAC_HEADER); altsd = get_iface_desc(host_iface); protocol = altsd->bInterfaceProtocol; if (!control_header) { dev_err(&dev->dev, "cannot find UAC_HEADER\n"); return -EINVAL; } switch (protocol) { default: dev_warn(&dev->dev, "unknown interface protocol %#02x, assuming v1\n", protocol); /* fall through */ case UAC_VERSION_1: { struct uac1_ac_header_descriptor *h1 = control_header; if (!h1->bInCollection) { dev_info(&dev->dev, "skipping empty audio interface (v1)\n"); return -EINVAL; } if (h1->bLength < sizeof(*h1) + h1->bInCollection) { dev_err(&dev->dev, "invalid UAC_HEADER (v1)\n"); return -EINVAL; } for (i = 0; i < h1->bInCollection; i++) snd_usb_create_stream(chip, ctrlif, h1->baInterfaceNr[i]); break; } case UAC_VERSION_2: { struct usb_interface_assoc_descriptor *assoc = usb_ifnum_to_if(dev, ctrlif)->intf_assoc; if (!assoc) { /* * Firmware writers cannot count to three. So to find * the IAD on the NuForce UDH-100, also check the next * interface. */ struct usb_interface *iface = usb_ifnum_to_if(dev, ctrlif + 1); if (iface && iface->intf_assoc && iface->intf_assoc->bFunctionClass == USB_CLASS_AUDIO && iface->intf_assoc->bFunctionProtocol == UAC_VERSION_2) assoc = iface->intf_assoc; } if (!assoc) { dev_err(&dev->dev, "Audio class v2 interfaces need an interface association\n"); return -EINVAL; } for (i = 0; i < assoc->bInterfaceCount; i++) { int intf = assoc->bFirstInterface + i; if (intf != ctrlif) snd_usb_create_stream(chip, ctrlif, intf); } break; } } return 0; }
static int snd_usb_create_streams(struct snd_usb_audio *chip, int ctrlif) { struct usb_device *dev = chip->dev; struct usb_host_interface *host_iface; struct usb_interface_descriptor *altsd; void *control_header; int i, protocol; int rest_bytes; /* find audiocontrol interface */ host_iface = &usb_ifnum_to_if(dev, ctrlif)->altsetting[0]; control_header = snd_usb_find_csint_desc(host_iface->extra, host_iface->extralen, NULL, UAC_HEADER); altsd = get_iface_desc(host_iface); protocol = altsd->bInterfaceProtocol; if (!control_header) { dev_err(&dev->dev, "cannot find UAC_HEADER\n"); return -EINVAL; } rest_bytes = (void *)(host_iface->extra + host_iface->extralen) - control_header; /* just to be sure -- this shouldn't hit at all */ if (rest_bytes <= 0) { dev_err(&dev->dev, "invalid control header\n"); return -EINVAL; } switch (protocol) { default: dev_warn(&dev->dev, "unknown interface protocol %#02x, assuming v1\n", protocol); /* fall through */ case UAC_VERSION_1: { struct uac1_ac_header_descriptor *h1 = control_header; if (rest_bytes < sizeof(*h1)) { dev_err(&dev->dev, "too short v1 buffer descriptor\n"); return -EINVAL; } if (!h1->bInCollection) { dev_info(&dev->dev, "skipping empty audio interface (v1)\n"); return -EINVAL; } if (rest_bytes < h1->bLength) { dev_err(&dev->dev, "invalid buffer length (v1)\n"); return -EINVAL; } if (h1->bLength < sizeof(*h1) + h1->bInCollection) { dev_err(&dev->dev, "invalid UAC_HEADER (v1)\n"); return -EINVAL; } for (i = 0; i < h1->bInCollection; i++) snd_usb_create_stream(chip, ctrlif, h1->baInterfaceNr[i]); break; } case UAC_VERSION_2: { struct usb_interface_assoc_descriptor *assoc = usb_ifnum_to_if(dev, ctrlif)->intf_assoc; if (!assoc) { /* * Firmware writers cannot count to three. So to find * the IAD on the NuForce UDH-100, also check the next * interface. */ struct usb_interface *iface = usb_ifnum_to_if(dev, ctrlif + 1); if (iface && iface->intf_assoc && iface->intf_assoc->bFunctionClass == USB_CLASS_AUDIO && iface->intf_assoc->bFunctionProtocol == UAC_VERSION_2) assoc = iface->intf_assoc; } if (!assoc) { dev_err(&dev->dev, "Audio class v2 interfaces need an interface association\n"); return -EINVAL; } for (i = 0; i < assoc->bInterfaceCount; i++) { int intf = assoc->bFirstInterface + i; if (intf != ctrlif) snd_usb_create_stream(chip, ctrlif, intf); } break; } } return 0; }
{ "deleted": [], "added": [ { "line_no": 8, "char_start": 235, "char_end": 252, "line": "\tint rest_bytes;\n" }, { "line_no": 23, "char_start": 637, "char_end": 704, "line": "\trest_bytes = (void *)(host_iface->extra + host_iface->extralen) -\n" }, { "line_no": 24, "char_start": 704, "char_end": 722, "line": "\t\tcontrol_header;\n" }, { "line_no": 25, "char_start": 722, "char_end": 723, "line": "\n" }, { "line_no": 26, "char_start": 723, "char_end": 775, "line": "\t/* just to be sure -- this shouldn't hit at all */\n" }, { "line_no": 27, "char_start": 775, "char_end": 799, "line": "\tif (rest_bytes <= 0) {\n" }, { "line_no": 28, "char_start": 799, "char_end": 849, "line": "\t\tdev_err(&dev->dev, \"invalid control header\\n\");\n" }, { "line_no": 29, "char_start": 849, "char_end": 867, "line": "\t\treturn -EINVAL;\n" }, { "line_no": 30, "char_start": 867, "char_end": 870, "line": "\t}\n" }, { "line_no": 31, "char_start": 870, "char_end": 871, "line": "\n" }, { "line_no": 42, "char_start": 1097, "char_end": 1131, "line": "\t\tif (rest_bytes < sizeof(*h1)) {\n" }, { "line_no": 43, "char_start": 1131, "char_end": 1190, "line": "\t\t\tdev_err(&dev->dev, \"too short v1 buffer descriptor\\n\");\n" }, { "line_no": 44, "char_start": 1190, "char_end": 1209, "line": "\t\t\treturn -EINVAL;\n" }, { "line_no": 45, "char_start": 1209, "char_end": 1213, "line": "\t\t}\n" }, { "line_no": 46, "char_start": 1213, "char_end": 1214, "line": "\n" }, { "line_no": 52, "char_start": 1331, "char_end": 1365, "line": "\t\tif (rest_bytes < h1->bLength) {\n" }, { "line_no": 53, "char_start": 1365, "char_end": 1420, "line": "\t\t\tdev_err(&dev->dev, \"invalid buffer length (v1)\\n\");\n" }, { "line_no": 54, "char_start": 1420, "char_end": 1439, "line": "\t\t\treturn -EINVAL;\n" }, { "line_no": 55, "char_start": 1439, "char_end": 1443, "line": "\t\t}\n" }, { "line_no": 56, "char_start": 1443, "char_end": 1444, "line": "\n" } ] }
{ "deleted": [], "added": [ { "char_start": 235, "char_end": 252, "chars": "\tint rest_bytes;\n" }, { "char_start": 638, "char_end": 872, "chars": "rest_bytes = (void *)(host_iface->extra + host_iface->extralen) -\n\t\tcontrol_header;\n\n\t/* just to be sure -- this shouldn't hit at all */\n\tif (rest_bytes <= 0) {\n\t\tdev_err(&dev->dev, \"invalid control header\\n\");\n\t\treturn -EINVAL;\n\t}\n\n\t" }, { "char_start": 1103, "char_end": 1220, "chars": "rest_bytes < sizeof(*h1)) {\n\t\t\tdev_err(&dev->dev, \"too short v1 buffer descriptor\\n\");\n\t\t\treturn -EINVAL;\n\t\t}\n\n\t\tif (" }, { "char_start": 1296, "char_end": 1409, "chars": " (v1)\\n\");\n\t\t\treturn -EINVAL;\n\t\t}\n\n\t\tif (rest_bytes < h1->bLength) {\n\t\t\tdev_err(&dev->dev, \"invalid buffer length" } ] }
github.com/torvalds/linux/commit/bfc81a8bc18e3c4ba0cbaa7666ff76be2f998991
sound/usb/card.c
cwe-125
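The added code computes rest_bytes — how much of the interface's extra data remains at and after the located UAC header — and bails out if that span cannot hold the fixed uac1 header or the header's own bLength. A compact sketch of the same check with hypothetical types (not the ALSA code):

#include <stddef.h>
#include <stdint.h>

/* Hypothetical fixed part of a class-specific header descriptor. */
struct class_hdr {
	uint8_t bLength;
	uint8_t bDescriptorType;
	uint8_t bInCollection;
};

/* Validate that `header`, located somewhere inside extra[0..extralen), is
   followed by enough bytes for the fixed struct and for its declared bLength. */
static const struct class_hdr *check_header(const uint8_t *extra, size_t extralen,
                                            const uint8_t *header)
{
	ptrdiff_t rest = (extra + extralen) - header;
	const struct class_hdr *h;

	if (rest <= 0 || (size_t)rest < sizeof(*h))
		return NULL;               /* header truncated or out of range */
	h = (const struct class_hdr *)header;
	if ((size_t)rest < h->bLength)
		return NULL;               /* declared length overruns the buffer */
	return h;
}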
process_packet_tail
void process_packet_tail(struct msg_digest *md) { struct state *st = md->st; enum state_kind from_state = md->v1_from_state; const struct state_v1_microcode *smc = md->smc; bool new_iv_set = md->new_iv_set; bool self_delete = FALSE; if (md->hdr.isa_flags & ISAKMP_FLAGS_v1_ENCRYPTION) { endpoint_buf b; dbg("received encrypted packet from %s", str_endpoint(&md->sender, &b)); if (st == NULL) { libreswan_log( "discarding encrypted message for an unknown ISAKMP SA"); return; } if (st->st_skeyid_e_nss == NULL) { loglog(RC_LOG_SERIOUS, "discarding encrypted message because we haven't yet negotiated keying material"); return; } /* Mark as encrypted */ md->encrypted = TRUE; /* do the specified decryption * * IV is from st->st_iv or (if new_iv_set) st->st_new_iv. * The new IV is placed in st->st_new_iv * * See RFC 2409 "IKE" Appendix B * * XXX The IV should only be updated really if the packet * is successfully processed. * We should keep this value, check for a success return * value from the parsing routines and then replace. * * Each post phase 1 exchange generates IVs from * the last phase 1 block, not the last block sent. */ const struct encrypt_desc *e = st->st_oakley.ta_encrypt; if (pbs_left(&md->message_pbs) % e->enc_blocksize != 0) { loglog(RC_LOG_SERIOUS, "malformed message: not a multiple of encryption blocksize"); return; } /* XXX Detect weak keys */ /* grab a copy of raw packet (for duplicate packet detection) */ md->raw_packet = clone_bytes_as_chunk(md->packet_pbs.start, pbs_room(&md->packet_pbs), "raw packet"); /* Decrypt everything after header */ if (!new_iv_set) { if (st->st_v1_iv.len == 0) { init_phase2_iv(st, &md->hdr.isa_msgid); } else { /* use old IV */ restore_new_iv(st, st->st_v1_iv); } } passert(st->st_v1_new_iv.len >= e->enc_blocksize); st->st_v1_new_iv.len = e->enc_blocksize; /* truncate */ if (DBGP(DBG_CRYPT)) { DBG_log("decrypting %u bytes using algorithm %s", (unsigned) pbs_left(&md->message_pbs), st->st_oakley.ta_encrypt->common.fqn); DBG_dump_hunk("IV before:", st->st_v1_new_iv); } e->encrypt_ops->do_crypt(e, md->message_pbs.cur, pbs_left(&md->message_pbs), st->st_enc_key_nss, st->st_v1_new_iv.ptr, FALSE); if (DBGP(DBG_CRYPT)) { DBG_dump_hunk("IV after:", st->st_v1_new_iv); DBG_log("decrypted payload (starts at offset %td):", md->message_pbs.cur - md->message_pbs.roof); DBG_dump(NULL, md->message_pbs.start, md->message_pbs.roof - md->message_pbs.start); } } else { /* packet was not encryped -- should it have been? */ if (smc->flags & SMF_INPUT_ENCRYPTED) { loglog(RC_LOG_SERIOUS, "packet rejected: should have been encrypted"); SEND_NOTIFICATION(INVALID_FLAGS); return; } } /* Digest the message. * Padding must be removed to make hashing work. * Padding comes from encryption (so this code must be after decryption). * Padding rules are described before the definition of * struct isakmp_hdr in packet.h. */ { enum next_payload_types_ikev1 np = md->hdr.isa_np; lset_t needed = smc->req_payloads; const char *excuse = LIN(SMF_PSK_AUTH | SMF_FIRST_ENCRYPTED_INPUT, smc->flags) ? "probable authentication failure (mismatch of preshared secrets?): " : ""; while (np != ISAKMP_NEXT_NONE) { struct_desc *sd = v1_payload_desc(np); if (md->digest_roof >= elemsof(md->digest)) { loglog(RC_LOG_SERIOUS, "more than %zu payloads in message; ignored", elemsof(md->digest)); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } struct payload_digest *const pd = md->digest + md->digest_roof; /* * only do this in main mode. 
In aggressive mode, there * is no negotiation of NAT-T method. Get it right. */ if (st != NULL && st->st_connection != NULL && (st->st_connection->policy & POLICY_AGGRESSIVE) == LEMPTY) { switch (np) { case ISAKMP_NEXT_NATD_RFC: case ISAKMP_NEXT_NATOA_RFC: if ((st->hidden_variables.st_nat_traversal & NAT_T_WITH_RFC_VALUES) == LEMPTY) { /* * don't accept NAT-D/NAT-OA reloc directly in message, * unless we're using NAT-T RFC */ DBG(DBG_NATT, DBG_log("st_nat_traversal was: %s", bitnamesof(natt_bit_names, st->hidden_variables.st_nat_traversal))); sd = NULL; } break; default: break; } } if (sd == NULL) { /* payload type is out of range or requires special handling */ switch (np) { case ISAKMP_NEXT_ID: /* ??? two kinds of ID payloads */ sd = (IS_PHASE1(from_state) || IS_PHASE15(from_state)) ? &isakmp_identification_desc : &isakmp_ipsec_identification_desc; break; case ISAKMP_NEXT_NATD_DRAFTS: /* out of range */ /* * ISAKMP_NEXT_NATD_DRAFTS was a private use type before RFC-3947. * Since it has the same format as ISAKMP_NEXT_NATD_RFC, * just rewrite np and sd, and carry on. */ np = ISAKMP_NEXT_NATD_RFC; sd = &isakmp_nat_d_drafts; break; case ISAKMP_NEXT_NATOA_DRAFTS: /* out of range */ /* NAT-OA was a private use type before RFC-3947 -- same format */ np = ISAKMP_NEXT_NATOA_RFC; sd = &isakmp_nat_oa_drafts; break; case ISAKMP_NEXT_SAK: /* or ISAKMP_NEXT_NATD_BADDRAFTS */ /* * Official standards say that this is ISAKMP_NEXT_SAK, * a part of Group DOI, something we don't implement. * Old non-updated Cisco gear abused this number in ancient NAT drafts. * We ignore (rather than reject) this in support of people * with crufty Cisco machines. */ loglog(RC_LOG_SERIOUS, "%smessage with unsupported payload ISAKMP_NEXT_SAK (or ISAKMP_NEXT_NATD_BADDRAFTS) ignored", excuse); /* * Hack to discard payload, whatever it was. * Since we are skipping the rest of the loop * body we must do some things ourself: * - demarshall the payload * - grab the next payload number (np) * - don't keep payload (don't increment pd) * - skip rest of loop body */ if (!in_struct(&pd->payload, &isakmp_ignore_desc, &md->message_pbs, &pd->pbs)) { loglog(RC_LOG_SERIOUS, "%smalformed payload in packet", excuse); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } np = pd->payload.generic.isag_np; /* NOTE: we do not increment pd! 
*/ continue; /* skip rest of the loop */ default: loglog(RC_LOG_SERIOUS, "%smessage ignored because it contains an unknown or unexpected payload type (%s) at the outermost level", excuse, enum_show(&ikev1_payload_names, np)); if (!md->encrypted) { SEND_NOTIFICATION(INVALID_PAYLOAD_TYPE); } return; } passert(sd != NULL); } passert(np < LELEM_ROOF); { lset_t s = LELEM(np); if (LDISJOINT(s, needed | smc->opt_payloads | LELEM(ISAKMP_NEXT_VID) | LELEM(ISAKMP_NEXT_N) | LELEM(ISAKMP_NEXT_D) | LELEM(ISAKMP_NEXT_CR) | LELEM(ISAKMP_NEXT_CERT))) { loglog(RC_LOG_SERIOUS, "%smessage ignored because it contains a payload type (%s) unexpected by state %s", excuse, enum_show(&ikev1_payload_names, np), st->st_state->name); if (!md->encrypted) { SEND_NOTIFICATION(INVALID_PAYLOAD_TYPE); } return; } DBG(DBG_PARSING, DBG_log("got payload 0x%" PRIxLSET" (%s) needed: 0x%" PRIxLSET " opt: 0x%" PRIxLSET, s, enum_show(&ikev1_payload_names, np), needed, smc->opt_payloads)); needed &= ~s; } /* * Read in the payload recording what type it * should be */ pd->payload_type = np; if (!in_struct(&pd->payload, sd, &md->message_pbs, &pd->pbs)) { loglog(RC_LOG_SERIOUS, "%smalformed payload in packet", excuse); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } /* do payload-type specific debugging */ switch (np) { case ISAKMP_NEXT_ID: case ISAKMP_NEXT_NATOA_RFC: /* dump ID section */ DBG(DBG_PARSING, DBG_dump(" obj: ", pd->pbs.cur, pbs_left(&pd->pbs))); break; default: break; } /* * Place payload at the end of the chain for this type. * This code appears in ikev1.c and ikev2.c. */ { /* np is a proper subscript for chain[] */ passert(np < elemsof(md->chain)); struct payload_digest **p = &md->chain[np]; while (*p != NULL) p = &(*p)->next; *p = pd; pd->next = NULL; } np = pd->payload.generic.isag_np; md->digest_roof++; /* since we've digested one payload happily, it is probably * the case that any decryption worked. So we will not suggest * encryption failure as an excuse for subsequent payload * problems. */ excuse = ""; } DBG(DBG_PARSING, { if (pbs_left(&md->message_pbs) != 0) DBG_log("removing %d bytes of padding", (int) pbs_left(&md->message_pbs)); }); md->message_pbs.roof = md->message_pbs.cur; /* check that all mandatory payloads appeared */ if (needed != 0) { loglog(RC_LOG_SERIOUS, "message for %s is missing payloads %s", finite_states[from_state]->name, bitnamesof(payload_name_ikev1, needed)); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } } if (!check_v1_HASH(smc->hash_type, smc->message, st, md)) { /*SEND_NOTIFICATION(INVALID_HASH_INFORMATION);*/ return; } /* more sanity checking: enforce most ordering constraints */ if (IS_PHASE1(from_state) || IS_PHASE15(from_state)) { /* rfc2409: The Internet Key Exchange (IKE), 5 Exchanges: * "The SA payload MUST precede all other payloads in a phase 1 exchange." */ if (md->chain[ISAKMP_NEXT_SA] != NULL && md->hdr.isa_np != ISAKMP_NEXT_SA) { loglog(RC_LOG_SERIOUS, "malformed Phase 1 message: does not start with an SA payload"); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } } else if (IS_QUICK(from_state)) { /* rfc2409: The Internet Key Exchange (IKE), 5.5 Phase 2 - Quick Mode * * "In Quick Mode, a HASH payload MUST immediately follow the ISAKMP * header and a SA payload MUST immediately follow the HASH." * [NOTE: there may be more than one SA payload, so this is not * totally reasonable. Probably all SAs should be so constrained.] 
* * "If ISAKMP is acting as a client negotiator on behalf of another * party, the identities of the parties MUST be passed as IDci and * then IDcr." * * "With the exception of the HASH, SA, and the optional ID payloads, * there are no payload ordering restrictions on Quick Mode." */ if (md->hdr.isa_np != ISAKMP_NEXT_HASH) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: does not start with a HASH payload"); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } { struct payload_digest *p; int i; p = md->chain[ISAKMP_NEXT_SA]; i = 1; while (p != NULL) { if (p != &md->digest[i]) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: SA payload is in wrong position"); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } p = p->next; i++; } } /* rfc2409: The Internet Key Exchange (IKE), 5.5 Phase 2 - Quick Mode: * "If ISAKMP is acting as a client negotiator on behalf of another * party, the identities of the parties MUST be passed as IDci and * then IDcr." */ { struct payload_digest *id = md->chain[ISAKMP_NEXT_ID]; if (id != NULL) { if (id->next == NULL || id->next->next != NULL) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: if any ID payload is present, there must be exactly two"); SEND_NOTIFICATION(PAYLOAD_MALFORMED); return; } if (id + 1 != id->next) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: the ID payloads are not adjacent"); SEND_NOTIFICATION(PAYLOAD_MALFORMED); return; } } } } /* * Ignore payloads that we don't handle: */ /* XXX Handle Notifications */ { struct payload_digest *p = md->chain[ISAKMP_NEXT_N]; while (p != NULL) { switch (p->payload.notification.isan_type) { case R_U_THERE: case R_U_THERE_ACK: case ISAKMP_N_CISCO_LOAD_BALANCE: case PAYLOAD_MALFORMED: case INVALID_MESSAGE_ID: case IPSEC_RESPONDER_LIFETIME: if (md->hdr.isa_xchg == ISAKMP_XCHG_INFO) { /* these are handled later on in informational() */ break; } /* FALL THROUGH */ default: if (st == NULL) { DBG(DBG_CONTROL, DBG_log( "ignoring informational payload %s, no corresponding state", enum_show(& ikev1_notify_names, p->payload.notification.isan_type))); } else { loglog(RC_LOG_SERIOUS, "ignoring informational payload %s, msgid=%08" PRIx32 ", length=%d", enum_show(&ikev1_notify_names, p->payload.notification.isan_type), st->st_v1_msgid.id, p->payload.notification.isan_length); DBG_dump_pbs(&p->pbs); } } if (DBGP(DBG_BASE)) { DBG_dump("info:", p->pbs.cur, pbs_left(&p->pbs)); } p = p->next; } p = md->chain[ISAKMP_NEXT_D]; while (p != NULL) { self_delete |= accept_delete(md, p); if (DBGP(DBG_BASE)) { DBG_dump("del:", p->pbs.cur, pbs_left(&p->pbs)); } if (md->st != st) { pexpect(md->st == NULL); dbg("zapping ST as accept_delete() zapped MD.ST"); st = md->st; } p = p->next; } p = md->chain[ISAKMP_NEXT_VID]; while (p != NULL) { handle_vendorid(md, (char *)p->pbs.cur, pbs_left(&p->pbs), FALSE); p = p->next; } } if (self_delete) { accept_self_delete(md); st = md->st; /* note: st ought to be NULL from here on */ } pexpect(st == md->st); statetime_t start = statetime_start(md->st); /* * XXX: danger - the .informational() processor deletes ST; * and then tunnels this loss through MD.ST. */ complete_v1_state_transition(md, smc->processor(st, md)); statetime_stop(&start, "%s()", __func__); /* our caller will release_any_md(mdp); */ }
void process_packet_tail(struct msg_digest *md) { struct state *st = md->st; enum state_kind from_state = md->v1_from_state; const struct state_v1_microcode *smc = md->smc; bool new_iv_set = md->new_iv_set; bool self_delete = FALSE; if (md->hdr.isa_flags & ISAKMP_FLAGS_v1_ENCRYPTION) { endpoint_buf b; dbg("received encrypted packet from %s", str_endpoint(&md->sender, &b)); if (st == NULL) { libreswan_log( "discarding encrypted message for an unknown ISAKMP SA"); return; } if (st->st_skeyid_e_nss == NULL) { loglog(RC_LOG_SERIOUS, "discarding encrypted message because we haven't yet negotiated keying material"); return; } /* Mark as encrypted */ md->encrypted = TRUE; /* do the specified decryption * * IV is from st->st_iv or (if new_iv_set) st->st_new_iv. * The new IV is placed in st->st_new_iv * * See RFC 2409 "IKE" Appendix B * * XXX The IV should only be updated really if the packet * is successfully processed. * We should keep this value, check for a success return * value from the parsing routines and then replace. * * Each post phase 1 exchange generates IVs from * the last phase 1 block, not the last block sent. */ const struct encrypt_desc *e = st->st_oakley.ta_encrypt; if (pbs_left(&md->message_pbs) % e->enc_blocksize != 0) { loglog(RC_LOG_SERIOUS, "malformed message: not a multiple of encryption blocksize"); return; } /* XXX Detect weak keys */ /* grab a copy of raw packet (for duplicate packet detection) */ md->raw_packet = clone_bytes_as_chunk(md->packet_pbs.start, pbs_room(&md->packet_pbs), "raw packet"); /* Decrypt everything after header */ if (!new_iv_set) { if (st->st_v1_iv.len == 0) { init_phase2_iv(st, &md->hdr.isa_msgid); } else { /* use old IV */ restore_new_iv(st, st->st_v1_iv); } } passert(st->st_v1_new_iv.len >= e->enc_blocksize); st->st_v1_new_iv.len = e->enc_blocksize; /* truncate */ if (DBGP(DBG_CRYPT)) { DBG_log("decrypting %u bytes using algorithm %s", (unsigned) pbs_left(&md->message_pbs), st->st_oakley.ta_encrypt->common.fqn); DBG_dump_hunk("IV before:", st->st_v1_new_iv); } e->encrypt_ops->do_crypt(e, md->message_pbs.cur, pbs_left(&md->message_pbs), st->st_enc_key_nss, st->st_v1_new_iv.ptr, FALSE); if (DBGP(DBG_CRYPT)) { DBG_dump_hunk("IV after:", st->st_v1_new_iv); DBG_log("decrypted payload (starts at offset %td):", md->message_pbs.cur - md->message_pbs.roof); DBG_dump(NULL, md->message_pbs.start, md->message_pbs.roof - md->message_pbs.start); } } else { /* packet was not encryped -- should it have been? */ if (smc->flags & SMF_INPUT_ENCRYPTED) { loglog(RC_LOG_SERIOUS, "packet rejected: should have been encrypted"); SEND_NOTIFICATION(INVALID_FLAGS); return; } } /* Digest the message. * Padding must be removed to make hashing work. * Padding comes from encryption (so this code must be after decryption). * Padding rules are described before the definition of * struct isakmp_hdr in packet.h. */ { enum next_payload_types_ikev1 np = md->hdr.isa_np; lset_t needed = smc->req_payloads; const char *excuse = LIN(SMF_PSK_AUTH | SMF_FIRST_ENCRYPTED_INPUT, smc->flags) ? "probable authentication failure (mismatch of preshared secrets?): " : ""; while (np != ISAKMP_NEXT_NONE) { struct_desc *sd = v1_payload_desc(np); if (md->digest_roof >= elemsof(md->digest)) { loglog(RC_LOG_SERIOUS, "more than %zu payloads in message; ignored", elemsof(md->digest)); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } struct payload_digest *const pd = md->digest + md->digest_roof; /* * only do this in main mode. 
In aggressive mode, there * is no negotiation of NAT-T method. Get it right. */ if (st != NULL && st->st_connection != NULL && (st->st_connection->policy & POLICY_AGGRESSIVE) == LEMPTY) { switch (np) { case ISAKMP_NEXT_NATD_RFC: case ISAKMP_NEXT_NATOA_RFC: if ((st->hidden_variables.st_nat_traversal & NAT_T_WITH_RFC_VALUES) == LEMPTY) { /* * don't accept NAT-D/NAT-OA reloc directly in message, * unless we're using NAT-T RFC */ DBG(DBG_NATT, DBG_log("st_nat_traversal was: %s", bitnamesof(natt_bit_names, st->hidden_variables.st_nat_traversal))); sd = NULL; } break; default: break; } } if (sd == NULL) { /* payload type is out of range or requires special handling */ switch (np) { case ISAKMP_NEXT_ID: /* ??? two kinds of ID payloads */ sd = (IS_PHASE1(from_state) || IS_PHASE15(from_state)) ? &isakmp_identification_desc : &isakmp_ipsec_identification_desc; break; case ISAKMP_NEXT_NATD_DRAFTS: /* out of range */ /* * ISAKMP_NEXT_NATD_DRAFTS was a private use type before RFC-3947. * Since it has the same format as ISAKMP_NEXT_NATD_RFC, * just rewrite np and sd, and carry on. */ np = ISAKMP_NEXT_NATD_RFC; sd = &isakmp_nat_d_drafts; break; case ISAKMP_NEXT_NATOA_DRAFTS: /* out of range */ /* NAT-OA was a private use type before RFC-3947 -- same format */ np = ISAKMP_NEXT_NATOA_RFC; sd = &isakmp_nat_oa_drafts; break; case ISAKMP_NEXT_SAK: /* or ISAKMP_NEXT_NATD_BADDRAFTS */ /* * Official standards say that this is ISAKMP_NEXT_SAK, * a part of Group DOI, something we don't implement. * Old non-updated Cisco gear abused this number in ancient NAT drafts. * We ignore (rather than reject) this in support of people * with crufty Cisco machines. */ loglog(RC_LOG_SERIOUS, "%smessage with unsupported payload ISAKMP_NEXT_SAK (or ISAKMP_NEXT_NATD_BADDRAFTS) ignored", excuse); /* * Hack to discard payload, whatever it was. * Since we are skipping the rest of the loop * body we must do some things ourself: * - demarshall the payload * - grab the next payload number (np) * - don't keep payload (don't increment pd) * - skip rest of loop body */ if (!in_struct(&pd->payload, &isakmp_ignore_desc, &md->message_pbs, &pd->pbs)) { loglog(RC_LOG_SERIOUS, "%smalformed payload in packet", excuse); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } np = pd->payload.generic.isag_np; /* NOTE: we do not increment pd! 
*/ continue; /* skip rest of the loop */ default: loglog(RC_LOG_SERIOUS, "%smessage ignored because it contains an unknown or unexpected payload type (%s) at the outermost level", excuse, enum_show(&ikev1_payload_names, np)); if (!md->encrypted) { SEND_NOTIFICATION(INVALID_PAYLOAD_TYPE); } return; } passert(sd != NULL); } passert(np < LELEM_ROOF); { lset_t s = LELEM(np); if (LDISJOINT(s, needed | smc->opt_payloads | LELEM(ISAKMP_NEXT_VID) | LELEM(ISAKMP_NEXT_N) | LELEM(ISAKMP_NEXT_D) | LELEM(ISAKMP_NEXT_CR) | LELEM(ISAKMP_NEXT_CERT))) { loglog(RC_LOG_SERIOUS, "%smessage ignored because it contains a payload type (%s) unexpected by state %s", excuse, enum_show(&ikev1_payload_names, np), finite_states[smc->state]->name); if (!md->encrypted) { SEND_NOTIFICATION(INVALID_PAYLOAD_TYPE); } return; } DBG(DBG_PARSING, DBG_log("got payload 0x%" PRIxLSET" (%s) needed: 0x%" PRIxLSET " opt: 0x%" PRIxLSET, s, enum_show(&ikev1_payload_names, np), needed, smc->opt_payloads)); needed &= ~s; } /* * Read in the payload recording what type it * should be */ pd->payload_type = np; if (!in_struct(&pd->payload, sd, &md->message_pbs, &pd->pbs)) { loglog(RC_LOG_SERIOUS, "%smalformed payload in packet", excuse); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } /* do payload-type specific debugging */ switch (np) { case ISAKMP_NEXT_ID: case ISAKMP_NEXT_NATOA_RFC: /* dump ID section */ DBG(DBG_PARSING, DBG_dump(" obj: ", pd->pbs.cur, pbs_left(&pd->pbs))); break; default: break; } /* * Place payload at the end of the chain for this type. * This code appears in ikev1.c and ikev2.c. */ { /* np is a proper subscript for chain[] */ passert(np < elemsof(md->chain)); struct payload_digest **p = &md->chain[np]; while (*p != NULL) p = &(*p)->next; *p = pd; pd->next = NULL; } np = pd->payload.generic.isag_np; md->digest_roof++; /* since we've digested one payload happily, it is probably * the case that any decryption worked. So we will not suggest * encryption failure as an excuse for subsequent payload * problems. */ excuse = ""; } DBG(DBG_PARSING, { if (pbs_left(&md->message_pbs) != 0) DBG_log("removing %d bytes of padding", (int) pbs_left(&md->message_pbs)); }); md->message_pbs.roof = md->message_pbs.cur; /* check that all mandatory payloads appeared */ if (needed != 0) { loglog(RC_LOG_SERIOUS, "message for %s is missing payloads %s", finite_states[from_state]->name, bitnamesof(payload_name_ikev1, needed)); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } } if (!check_v1_HASH(smc->hash_type, smc->message, st, md)) { /*SEND_NOTIFICATION(INVALID_HASH_INFORMATION);*/ return; } /* more sanity checking: enforce most ordering constraints */ if (IS_PHASE1(from_state) || IS_PHASE15(from_state)) { /* rfc2409: The Internet Key Exchange (IKE), 5 Exchanges: * "The SA payload MUST precede all other payloads in a phase 1 exchange." */ if (md->chain[ISAKMP_NEXT_SA] != NULL && md->hdr.isa_np != ISAKMP_NEXT_SA) { loglog(RC_LOG_SERIOUS, "malformed Phase 1 message: does not start with an SA payload"); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } } else if (IS_QUICK(from_state)) { /* rfc2409: The Internet Key Exchange (IKE), 5.5 Phase 2 - Quick Mode * * "In Quick Mode, a HASH payload MUST immediately follow the ISAKMP * header and a SA payload MUST immediately follow the HASH." * [NOTE: there may be more than one SA payload, so this is not * totally reasonable. Probably all SAs should be so constrained.] 
* * "If ISAKMP is acting as a client negotiator on behalf of another * party, the identities of the parties MUST be passed as IDci and * then IDcr." * * "With the exception of the HASH, SA, and the optional ID payloads, * there are no payload ordering restrictions on Quick Mode." */ if (md->hdr.isa_np != ISAKMP_NEXT_HASH) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: does not start with a HASH payload"); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } { struct payload_digest *p; int i; p = md->chain[ISAKMP_NEXT_SA]; i = 1; while (p != NULL) { if (p != &md->digest[i]) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: SA payload is in wrong position"); if (!md->encrypted) { SEND_NOTIFICATION(PAYLOAD_MALFORMED); } return; } p = p->next; i++; } } /* rfc2409: The Internet Key Exchange (IKE), 5.5 Phase 2 - Quick Mode: * "If ISAKMP is acting as a client negotiator on behalf of another * party, the identities of the parties MUST be passed as IDci and * then IDcr." */ { struct payload_digest *id = md->chain[ISAKMP_NEXT_ID]; if (id != NULL) { if (id->next == NULL || id->next->next != NULL) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: if any ID payload is present, there must be exactly two"); SEND_NOTIFICATION(PAYLOAD_MALFORMED); return; } if (id + 1 != id->next) { loglog(RC_LOG_SERIOUS, "malformed Quick Mode message: the ID payloads are not adjacent"); SEND_NOTIFICATION(PAYLOAD_MALFORMED); return; } } } } /* * Ignore payloads that we don't handle: */ /* XXX Handle Notifications */ { struct payload_digest *p = md->chain[ISAKMP_NEXT_N]; while (p != NULL) { switch (p->payload.notification.isan_type) { case R_U_THERE: case R_U_THERE_ACK: case ISAKMP_N_CISCO_LOAD_BALANCE: case PAYLOAD_MALFORMED: case INVALID_MESSAGE_ID: case IPSEC_RESPONDER_LIFETIME: if (md->hdr.isa_xchg == ISAKMP_XCHG_INFO) { /* these are handled later on in informational() */ break; } /* FALL THROUGH */ default: if (st == NULL) { DBG(DBG_CONTROL, DBG_log( "ignoring informational payload %s, no corresponding state", enum_show(& ikev1_notify_names, p->payload.notification.isan_type))); } else { loglog(RC_LOG_SERIOUS, "ignoring informational payload %s, msgid=%08" PRIx32 ", length=%d", enum_show(&ikev1_notify_names, p->payload.notification.isan_type), st->st_v1_msgid.id, p->payload.notification.isan_length); DBG_dump_pbs(&p->pbs); } } if (DBGP(DBG_BASE)) { DBG_dump("info:", p->pbs.cur, pbs_left(&p->pbs)); } p = p->next; } p = md->chain[ISAKMP_NEXT_D]; while (p != NULL) { self_delete |= accept_delete(md, p); if (DBGP(DBG_BASE)) { DBG_dump("del:", p->pbs.cur, pbs_left(&p->pbs)); } if (md->st != st) { pexpect(md->st == NULL); dbg("zapping ST as accept_delete() zapped MD.ST"); st = md->st; } p = p->next; } p = md->chain[ISAKMP_NEXT_VID]; while (p != NULL) { handle_vendorid(md, (char *)p->pbs.cur, pbs_left(&p->pbs), FALSE); p = p->next; } } if (self_delete) { accept_self_delete(md); st = md->st; /* note: st ought to be NULL from here on */ } pexpect(st == md->st); statetime_t start = statetime_start(md->st); /* * XXX: danger - the .informational() processor deletes ST; * and then tunnels this loss through MD.ST. */ complete_v1_state_transition(md, smc->processor(st, md)); statetime_stop(&start, "%s()", __func__); /* our caller will release_any_md(mdp); */ }
{ "deleted": [ { "line_no": 244, "char_start": 7553, "char_end": 7580, "line": "\t\t\t\t\t\tst->st_state->name);\n" } ], "added": [ { "line_no": 244, "char_start": 7553, "char_end": 7593, "line": "\t\t\t\t\t\tfinite_states[smc->state]->name);\n" } ] }
{ "deleted": [ { "char_start": 7565, "char_end": 7568, "chars": "_st" } ], "added": [ { "char_start": 7559, "char_end": 7566, "chars": "finite_" }, { "char_start": 7568, "char_end": 7576, "chars": "ates[smc" }, { "char_start": 7583, "char_end": 7584, "chars": "]" } ] }
github.com/libreswan/libreswan/commit/471a3e41a449d7c753bc4edbba4239501bb62ba8
programs/pluto/ikev1.c
cwe-125
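For context on the record above: the recorded one-line change swaps the error message's state name from a read through `st->st_state` to a lookup in the `finite_states[]` table indexed by the state-transition entry's own `smc->state`, and the entry is filed as CWE-125 (out-of-bounds read). The sketch below is a hypothetical, heavily simplified illustration of that table-lookup pattern; `state_table`, `state_name`, `STATE_COUNT` and the bounds check are inventions for this sketch, not libreswan code.

#include <cstdio>

// Hypothetical, simplified model of a finite-state name table.
enum { STATE_COUNT = 4 };

struct finite_state { const char *name; };

static const finite_state state_table[STATE_COUNT] = {
    {"MAIN_R0"}, {"MAIN_R1"}, {"MAIN_R2"}, {"MAIN_R3"},
};

// Stand-in for the state-machine entry that drives message handling.
struct state_transition { unsigned state; /* index into state_table */ };

// Instead of dereferencing a state object that may not match the entry
// being processed, look the name up by the entry's own index and
// bounds-check that index first.
static const char *state_name(const state_transition *smc)
{
    if (smc == nullptr || smc->state >= STATE_COUNT)
        return "UNKNOWN";
    return state_table[smc->state].name;
}

int main()
{
    state_transition smc = {2};
    std::printf("payload unexpected by state %s\n", state_name(&smc));
    return 0;
}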
CSoundFile::GetLength
std::vector<GetLengthType> CSoundFile::GetLength(enmGetLengthResetMode adjustMode, GetLengthTarget target) { std::vector<GetLengthType> results; GetLengthType retval; retval.startOrder = target.startOrder; retval.startRow = target.startRow; // Are we trying to reach a certain pattern position? const bool hasSearchTarget = target.mode != GetLengthTarget::NoTarget; const bool adjustSamplePos = (adjustMode & eAdjustSamplePositions) == eAdjustSamplePositions; SEQUENCEINDEX sequence = target.sequence; if(sequence >= Order.GetNumSequences()) sequence = Order.GetCurrentSequenceIndex(); const ModSequence &orderList = Order(sequence); GetLengthMemory memory(*this); CSoundFile::PlayState &playState = *memory.state; // Temporary visited rows vector (so that GetLength() won't interfere with the player code if the module is playing at the same time) RowVisitor visitedRows(*this, sequence); playState.m_nNextRow = playState.m_nRow = target.startRow; playState.m_nNextOrder = playState.m_nCurrentOrder = target.startOrder; // Fast LUTs for commands that are too weird / complicated / whatever to emulate in sample position adjust mode. std::bitset<MAX_EFFECTS> forbiddenCommands; std::bitset<MAX_VOLCMDS> forbiddenVolCommands; if(adjustSamplePos) { forbiddenCommands.set(CMD_ARPEGGIO); forbiddenCommands.set(CMD_PORTAMENTOUP); forbiddenCommands.set(CMD_PORTAMENTODOWN); forbiddenCommands.set(CMD_XFINEPORTAUPDOWN); forbiddenCommands.set(CMD_NOTESLIDEUP); forbiddenCommands.set(CMD_NOTESLIDEUPRETRIG); forbiddenCommands.set(CMD_NOTESLIDEDOWN); forbiddenCommands.set(CMD_NOTESLIDEDOWNRETRIG); forbiddenVolCommands.set(VOLCMD_PORTAUP); forbiddenVolCommands.set(VOLCMD_PORTADOWN); // Optimize away channels for which it's pointless to adjust sample positions for(CHANNELINDEX i = 0; i < GetNumChannels(); i++) { if(ChnSettings[i].dwFlags[CHN_MUTE]) memory.chnSettings[i].ticksToRender = GetLengthMemory::IGNORE_CHANNEL; } if(target.mode == GetLengthTarget::SeekPosition && target.pos.order < orderList.size()) { // If we know where to seek, we can directly rule out any channels on which a new note would be triggered right at the start. const PATTERNINDEX seekPat = orderList[target.pos.order]; if(Patterns.IsValidPat(seekPat) && Patterns[seekPat].IsValidRow(target.pos.row)) { const ModCommand *m = Patterns[seekPat].GetRow(target.pos.row); for(CHANNELINDEX i = 0; i < GetNumChannels(); i++, m++) { if(m->note == NOTE_NOTECUT || m->note == NOTE_KEYOFF || (m->note == NOTE_FADE && GetNumInstruments()) || (m->IsNote() && !m->IsPortamento())) { memory.chnSettings[i].ticksToRender = GetLengthMemory::IGNORE_CHANNEL; } } } } } // If samples are being synced, force them to resync if tick duration changes uint32 oldTickDuration = 0; for (;;) { // Time target reached. if(target.mode == GetLengthTarget::SeekSeconds && memory.elapsedTime >= target.time) { retval.targetReached = true; break; } uint32 rowDelay = 0, tickDelay = 0; playState.m_nRow = playState.m_nNextRow; playState.m_nCurrentOrder = playState.m_nNextOrder; if(orderList.IsValidPat(playState.m_nCurrentOrder) && playState.m_nRow >= Patterns[orderList[playState.m_nCurrentOrder]].GetNumRows()) { playState.m_nRow = 0; if(m_playBehaviour[kFT2LoopE60Restart]) { playState.m_nRow = playState.m_nNextPatStartRow; playState.m_nNextPatStartRow = 0; } playState.m_nCurrentOrder = ++playState.m_nNextOrder; } // Check if pattern is valid playState.m_nPattern = playState.m_nCurrentOrder < orderList.size() ? 
orderList[playState.m_nCurrentOrder] : orderList.GetInvalidPatIndex(); bool positionJumpOnThisRow = false; bool patternBreakOnThisRow = false; bool patternLoopEndedOnThisRow = false, patternLoopStartedOnThisRow = false; if(!Patterns.IsValidPat(playState.m_nPattern) && playState.m_nPattern != orderList.GetInvalidPatIndex() && target.mode == GetLengthTarget::SeekPosition && playState.m_nCurrentOrder == target.pos.order) { // Early test: Target is inside +++ or non-existing pattern retval.targetReached = true; break; } while(playState.m_nPattern >= Patterns.Size()) { // End of song? if((playState.m_nPattern == orderList.GetInvalidPatIndex()) || (playState.m_nCurrentOrder >= orderList.size())) { if(playState.m_nCurrentOrder == orderList.GetRestartPos()) break; else playState.m_nCurrentOrder = orderList.GetRestartPos(); } else { playState.m_nCurrentOrder++; } playState.m_nPattern = (playState.m_nCurrentOrder < orderList.size()) ? orderList[playState.m_nCurrentOrder] : orderList.GetInvalidPatIndex(); playState.m_nNextOrder = playState.m_nCurrentOrder; if((!Patterns.IsValidPat(playState.m_nPattern)) && visitedRows.IsVisited(playState.m_nCurrentOrder, 0, true)) { if(!hasSearchTarget || !visitedRows.GetFirstUnvisitedRow(playState.m_nNextOrder, playState.m_nRow, true)) { // We aren't searching for a specific row, or we couldn't find any more unvisited rows. break; } else { // We haven't found the target row yet, but we found some other unplayed row... continue searching from here. retval.duration = memory.elapsedTime; results.push_back(retval); retval.startRow = playState.m_nRow; retval.startOrder = playState.m_nNextOrder; memory.Reset(); playState.m_nCurrentOrder = playState.m_nNextOrder; playState.m_nPattern = orderList[playState.m_nCurrentOrder]; playState.m_nNextRow = playState.m_nRow; break; } } } if(playState.m_nNextOrder == ORDERINDEX_INVALID) { // GetFirstUnvisitedRow failed, so there is nothing more to play break; } // Skip non-existing patterns if(!Patterns.IsValidPat(playState.m_nPattern)) { // If there isn't even a tune, we should probably stop here. if(playState.m_nCurrentOrder == orderList.GetRestartPos()) { if(!hasSearchTarget || !visitedRows.GetFirstUnvisitedRow(playState.m_nNextOrder, playState.m_nRow, true)) { // We aren't searching for a specific row, or we couldn't find any more unvisited rows. break; } else { // We haven't found the target row yet, but we found some other unplayed row... continue searching from here. retval.duration = memory.elapsedTime; results.push_back(retval); retval.startRow = playState.m_nRow; retval.startOrder = playState.m_nNextOrder; memory.Reset(); playState.m_nNextRow = playState.m_nRow; continue; } } playState.m_nNextOrder = playState.m_nCurrentOrder + 1; continue; } // Should never happen if(playState.m_nRow >= Patterns[playState.m_nPattern].GetNumRows()) playState.m_nRow = 0; // Check whether target was reached. if(target.mode == GetLengthTarget::SeekPosition && playState.m_nCurrentOrder == target.pos.order && playState.m_nRow == target.pos.row) { retval.targetReached = true; break; } if(visitedRows.IsVisited(playState.m_nCurrentOrder, playState.m_nRow, true)) { if(!hasSearchTarget || !visitedRows.GetFirstUnvisitedRow(playState.m_nNextOrder, playState.m_nRow, true)) { // We aren't searching for a specific row, or we couldn't find any more unvisited rows. break; } else { // We haven't found the target row yet, but we found some other unplayed row... continue searching from here. 
retval.duration = memory.elapsedTime; results.push_back(retval); retval.startRow = playState.m_nRow; retval.startOrder = playState.m_nNextOrder; memory.Reset(); playState.m_nNextRow = playState.m_nRow; continue; } } retval.endOrder = playState.m_nCurrentOrder; retval.endRow = playState.m_nRow; // Update next position playState.m_nNextRow = playState.m_nRow + 1; // Jumped to invalid pattern row? if(playState.m_nRow >= Patterns[playState.m_nPattern].GetNumRows()) { playState.m_nRow = 0; } // New pattern? if(!playState.m_nRow) { for(CHANNELINDEX chn = 0; chn < GetNumChannels(); chn++) { memory.chnSettings[chn].patLoop = memory.elapsedTime; memory.chnSettings[chn].patLoopSmp = playState.m_lTotalSampleCount; } } ModChannel *pChn = playState.Chn; // For various effects, we need to know first how many ticks there are in this row. const ModCommand *p = Patterns[playState.m_nPattern].GetpModCommand(playState.m_nRow, 0); for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, p++) { if(m_playBehaviour[kST3NoMutedChannels] && ChnSettings[nChn].dwFlags[CHN_MUTE]) // not even effects are processed on muted S3M channels continue; if(p->IsPcNote()) { #ifndef NO_PLUGINS if((adjustMode & eAdjust) && p->instr > 0 && p->instr <= MAX_MIXPLUGINS) { memory.plugParams[std::make_pair(p->instr, p->GetValueVolCol())] = p->GetValueEffectCol(); } #endif // NO_PLUGINS pChn[nChn].rowCommand.Clear(); continue; } pChn[nChn].rowCommand = *p; switch(p->command) { case CMD_SPEED: SetSpeed(playState, p->param); break; case CMD_TEMPO: if(m_playBehaviour[kMODVBlankTiming]) { // ProTracker MODs with VBlank timing: All Fxx parameters set the tick count. if(p->param != 0) SetSpeed(playState, p->param); } break; case CMD_S3MCMDEX: if((p->param & 0xF0) == 0x60) { // Fine Pattern Delay tickDelay += (p->param & 0x0F); } else if((p->param & 0xF0) == 0xE0 && !rowDelay) { // Pattern Delay if(!(GetType() & MOD_TYPE_S3M) || (p->param & 0x0F) != 0) { // While Impulse Tracker *does* count S60 as a valid row delay (and thus ignores any other row delay commands on the right), // Scream Tracker 3 simply ignores such commands. 
rowDelay = 1 + (p->param & 0x0F); } } break; case CMD_MODCMDEX: if((p->param & 0xF0) == 0xE0) { // Pattern Delay rowDelay = 1 + (p->param & 0x0F); } break; } } if(rowDelay == 0) rowDelay = 1; const uint32 numTicks = (playState.m_nMusicSpeed + tickDelay) * rowDelay; const uint32 nonRowTicks = numTicks - rowDelay; for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); pChn++, nChn++) if(!pChn->rowCommand.IsEmpty()) { if(m_playBehaviour[kST3NoMutedChannels] && ChnSettings[nChn].dwFlags[CHN_MUTE]) // not even effects are processed on muted S3M channels continue; ModCommand::COMMAND command = pChn->rowCommand.command; ModCommand::PARAM param = pChn->rowCommand.param; ModCommand::NOTE note = pChn->rowCommand.note; if (pChn->rowCommand.instr) { pChn->nNewIns = pChn->rowCommand.instr; pChn->nLastNote = NOTE_NONE; memory.chnSettings[nChn].vol = 0xFF; } if (pChn->rowCommand.IsNote()) pChn->nLastNote = note; // Update channel panning if(pChn->rowCommand.IsNote() || pChn->rowCommand.instr) { SAMPLEINDEX smp = 0; if(GetNumInstruments()) { ModInstrument *pIns; if(pChn->nNewIns <= GetNumInstruments() && (pIns = Instruments[pChn->nNewIns]) != nullptr) { if(pIns->dwFlags[INS_SETPANNING]) pChn->nPan = pIns->nPan; if(ModCommand::IsNote(note)) smp = pIns->Keyboard[note - NOTE_MIN]; } } else { smp = pChn->nNewIns; } if(smp > 0 && smp <= GetNumSamples() && Samples[smp].uFlags[CHN_PANNING]) { pChn->nPan = Samples[smp].nPan; } } switch(pChn->rowCommand.volcmd) { case VOLCMD_VOLUME: memory.chnSettings[nChn].vol = pChn->rowCommand.vol; break; case VOLCMD_VOLSLIDEUP: case VOLCMD_VOLSLIDEDOWN: if(pChn->rowCommand.vol != 0) pChn->nOldVolParam = pChn->rowCommand.vol; break; } switch(command) { // Position Jump case CMD_POSITIONJUMP: positionJumpOnThisRow = true; playState.m_nNextOrder = static_cast<ORDERINDEX>(CalculateXParam(playState.m_nPattern, playState.m_nRow, nChn)); playState.m_nNextPatStartRow = 0; // FT2 E60 bug // see https://forum.openmpt.org/index.php?topic=2769.0 - FastTracker resets Dxx if Bxx is called _after_ Dxx // Test case: PatternJump.mod if(!patternBreakOnThisRow || (GetType() & (MOD_TYPE_MOD | MOD_TYPE_XM))) playState.m_nNextRow = 0; if (adjustMode & eAdjust) { pChn->nPatternLoopCount = 0; pChn->nPatternLoop = 0; } break; // Pattern Break case CMD_PATTERNBREAK: { ROWINDEX row = PatternBreak(playState, nChn, param); if(row != ROWINDEX_INVALID) { patternBreakOnThisRow = true; playState.m_nNextRow = row; if(!positionJumpOnThisRow) { playState.m_nNextOrder = playState.m_nCurrentOrder + 1; } if(adjustMode & eAdjust) { pChn->nPatternLoopCount = 0; pChn->nPatternLoop = 0; } } } break; // Set Tempo case CMD_TEMPO: if(!m_playBehaviour[kMODVBlankTiming]) { TEMPO tempo(CalculateXParam(playState.m_nPattern, playState.m_nRow, nChn), 0); if ((adjustMode & eAdjust) && (GetType() & (MOD_TYPE_S3M | MOD_TYPE_IT | MOD_TYPE_MPT))) { if (tempo.GetInt()) pChn->nOldTempo = static_cast<uint8>(tempo.GetInt()); else tempo.Set(pChn->nOldTempo); } if (tempo.GetInt() >= 0x20) playState.m_nMusicTempo = tempo; else { // Tempo Slide TEMPO tempoDiff((tempo.GetInt() & 0x0F) * nonRowTicks, 0); if ((tempo.GetInt() & 0xF0) == 0x10) { playState.m_nMusicTempo += tempoDiff; } else { if(tempoDiff < playState.m_nMusicTempo) playState.m_nMusicTempo -= tempoDiff; else playState.m_nMusicTempo.Set(0); } } TEMPO tempoMin = GetModSpecifications().GetTempoMin(), tempoMax = GetModSpecifications().GetTempoMax(); if(m_playBehaviour[kTempoClamp]) // clamp tempo correctly in compatible mode { tempoMax.Set(255); } Limit(playState.m_nMusicTempo, tempoMin, 
tempoMax); } break; case CMD_S3MCMDEX: switch(param & 0xF0) { case 0x90: if(param <= 0x91) { pChn->dwFlags.set(CHN_SURROUND, param == 0x91); } break; case 0xA0: // High sample offset pChn->nOldHiOffset = param & 0x0F; break; case 0xB0: // Pattern Loop if (param & 0x0F) { patternLoopEndedOnThisRow = true; } else { CHANNELINDEX firstChn = nChn, lastChn = nChn; if(GetType() == MOD_TYPE_S3M) { // ST3 has only one global loop memory. firstChn = 0; lastChn = GetNumChannels() - 1; } for(CHANNELINDEX c = firstChn; c <= lastChn; c++) { memory.chnSettings[c].patLoop = memory.elapsedTime; memory.chnSettings[c].patLoopSmp = playState.m_lTotalSampleCount; memory.chnSettings[c].patLoopStart = playState.m_nRow; } patternLoopStartedOnThisRow = true; } break; case 0xF0: // Active macro pChn->nActiveMacro = param & 0x0F; break; } break; case CMD_MODCMDEX: switch(param & 0xF0) { case 0x60: // Pattern Loop if (param & 0x0F) { playState.m_nNextPatStartRow = memory.chnSettings[nChn].patLoopStart; // FT2 E60 bug patternLoopEndedOnThisRow = true; } else { patternLoopStartedOnThisRow = true; memory.chnSettings[nChn].patLoop = memory.elapsedTime; memory.chnSettings[nChn].patLoopSmp = playState.m_lTotalSampleCount; memory.chnSettings[nChn].patLoopStart = playState.m_nRow; } break; case 0xF0: // Active macro pChn->nActiveMacro = param & 0x0F; break; } break; case CMD_XFINEPORTAUPDOWN: // ignore high offset in compatible mode if(((param & 0xF0) == 0xA0) && !m_playBehaviour[kFT2RestrictXCommand]) pChn->nOldHiOffset = param & 0x0F; break; } // The following calculations are not interesting if we just want to get the song length. if (!(adjustMode & eAdjust)) continue; switch(command) { // Portamento Up/Down case CMD_PORTAMENTOUP: if(param) { // FT2 compatibility: Separate effect memory for all portamento commands // Test case: Porta-LinkMem.xm if(!m_playBehaviour[kFT2PortaUpDownMemory]) pChn->nOldPortaDown = param; pChn->nOldPortaUp = param; } break; case CMD_PORTAMENTODOWN: if(param) { // FT2 compatibility: Separate effect memory for all portamento commands // Test case: Porta-LinkMem.xm if(!m_playBehaviour[kFT2PortaUpDownMemory]) pChn->nOldPortaUp = param; pChn->nOldPortaDown = param; } break; // Tone-Portamento case CMD_TONEPORTAMENTO: if (param) pChn->nPortamentoSlide = param << 2; break; // Offset case CMD_OFFSET: if (param) pChn->oldOffset = param << 8; break; // Volume Slide case CMD_VOLUMESLIDE: case CMD_TONEPORTAVOL: if (param) pChn->nOldVolumeSlide = param; break; // Set Volume case CMD_VOLUME: memory.chnSettings[nChn].vol = param; break; // Global Volume case CMD_GLOBALVOLUME: if(!(GetType() & GLOBALVOL_7BIT_FORMATS) && param < 128) param *= 2; // IT compatibility 16. ST3 and IT ignore out-of-range values if(param <= 128) { playState.m_nGlobalVolume = param * 2; } else if(!(GetType() & (MOD_TYPE_IT | MOD_TYPE_MPT | MOD_TYPE_S3M))) { playState.m_nGlobalVolume = 256; } break; // Global Volume Slide case CMD_GLOBALVOLSLIDE: if(m_playBehaviour[kPerChannelGlobalVolSlide]) { // IT compatibility 16. 
Global volume slide params are stored per channel (FT2/IT) if (param) pChn->nOldGlobalVolSlide = param; else param = pChn->nOldGlobalVolSlide; } else { if (param) playState.Chn[0].nOldGlobalVolSlide = param; else param = playState.Chn[0].nOldGlobalVolSlide; } if (((param & 0x0F) == 0x0F) && (param & 0xF0)) { param >>= 4; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume += param << 1; } else if (((param & 0xF0) == 0xF0) && (param & 0x0F)) { param = (param & 0x0F) << 1; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume -= param; } else if (param & 0xF0) { param >>= 4; param <<= 1; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume += param * nonRowTicks; } else { param = (param & 0x0F) << 1; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume -= param * nonRowTicks; } Limit(playState.m_nGlobalVolume, 0, 256); break; case CMD_CHANNELVOLUME: if (param <= 64) pChn->nGlobalVol = param; break; case CMD_CHANNELVOLSLIDE: { if (param) pChn->nOldChnVolSlide = param; else param = pChn->nOldChnVolSlide; int32 volume = pChn->nGlobalVol; if((param & 0x0F) == 0x0F && (param & 0xF0)) volume += (param >> 4); // Fine Up else if((param & 0xF0) == 0xF0 && (param & 0x0F)) volume -= (param & 0x0F); // Fine Down else if(param & 0x0F) // Down volume -= (param & 0x0F) * nonRowTicks; else // Up volume += ((param & 0xF0) >> 4) * nonRowTicks; Limit(volume, 0, 64); pChn->nGlobalVol = volume; } break; case CMD_PANNING8: Panning(pChn, param, Pan8bit); break; case CMD_MODCMDEX: if(param < 0x10) { // LED filter for(CHANNELINDEX chn = 0; chn < GetNumChannels(); chn++) { playState.Chn[chn].dwFlags.set(CHN_AMIGAFILTER, !(param & 1)); } } MPT_FALLTHROUGH; case CMD_S3MCMDEX: if((param & 0xF0) == 0x80) { Panning(pChn, (param & 0x0F), Pan4bit); } break; case CMD_VIBRATOVOL: if (param) pChn->nOldVolumeSlide = param; param = 0; MPT_FALLTHROUGH; case CMD_VIBRATO: Vibrato(pChn, param); break; case CMD_FINEVIBRATO: FineVibrato(pChn, param); break; case CMD_TREMOLO: Tremolo(pChn, param); break; case CMD_PANBRELLO: Panbrello(pChn, param); break; } switch(pChn->rowCommand.volcmd) { case VOLCMD_PANNING: Panning(pChn, pChn->rowCommand.vol, Pan6bit); break; case VOLCMD_VIBRATOSPEED: // FT2 does not automatically enable vibrato with the "set vibrato speed" command if(m_playBehaviour[kFT2VolColVibrato]) pChn->nVibratoSpeed = pChn->rowCommand.vol & 0x0F; else Vibrato(pChn, pChn->rowCommand.vol << 4); break; case VOLCMD_VIBRATODEPTH: Vibrato(pChn, pChn->rowCommand.vol); break; } // Process vibrato / tremolo / panbrello switch(pChn->rowCommand.command) { case CMD_VIBRATO: case CMD_FINEVIBRATO: case CMD_VIBRATOVOL: if(adjustMode & eAdjust) { uint32 vibTicks = ((GetType() & (MOD_TYPE_IT | MOD_TYPE_MPT)) && !m_SongFlags[SONG_ITOLDEFFECTS]) ? numTicks : nonRowTicks; uint32 inc = pChn->nVibratoSpeed * vibTicks; if(m_playBehaviour[kITVibratoTremoloPanbrello]) inc *= 4; pChn->nVibratoPos += static_cast<uint8>(inc); } break; case CMD_TREMOLO: if(adjustMode & eAdjust) { uint32 tremTicks = ((GetType() & (MOD_TYPE_IT | MOD_TYPE_MPT)) && !m_SongFlags[SONG_ITOLDEFFECTS]) ? 
numTicks : nonRowTicks; uint32 inc = pChn->nTremoloSpeed * tremTicks; if(m_playBehaviour[kITVibratoTremoloPanbrello]) inc *= 4; pChn->nTremoloPos += static_cast<uint8>(inc); } break; case CMD_PANBRELLO: if(adjustMode & eAdjust) { // Panbrello effect is permanent in compatible mode, so actually apply panbrello for the last tick of this row pChn->nPanbrelloPos += static_cast<uint8>(pChn->nPanbrelloSpeed * (numTicks - 1)); ProcessPanbrello(pChn); } break; } } // Interpret F00 effect in XM files as "stop song" if(GetType() == MOD_TYPE_XM && playState.m_nMusicSpeed == uint16_max) { break; } playState.m_nCurrentRowsPerBeat = m_nDefaultRowsPerBeat; if(Patterns[playState.m_nPattern].GetOverrideSignature()) { playState.m_nCurrentRowsPerBeat = Patterns[playState.m_nPattern].GetRowsPerBeat(); } const uint32 tickDuration = GetTickDuration(playState); const uint32 rowDuration = tickDuration * numTicks; memory.elapsedTime += static_cast<double>(rowDuration) / static_cast<double>(m_MixerSettings.gdwMixingFreq); playState.m_lTotalSampleCount += rowDuration; if(adjustSamplePos) { // Super experimental and dirty sample seeking pChn = playState.Chn; for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); pChn++, nChn++) { if(memory.chnSettings[nChn].ticksToRender == GetLengthMemory::IGNORE_CHANNEL) continue; uint32 startTick = 0; const ModCommand &m = pChn->rowCommand; uint32 paramHi = m.param >> 4, paramLo = m.param & 0x0F; bool porta = m.command == CMD_TONEPORTAMENTO || m.command == CMD_TONEPORTAVOL || m.volcmd == VOLCMD_TONEPORTAMENTO; bool stopNote = patternLoopStartedOnThisRow; // It's too much trouble to keep those pattern loops in sync... if(m.instr) pChn->proTrackerOffset = 0; if(m.IsNote()) { if(porta && memory.chnSettings[nChn].incChanged) { // If there's a portamento, the current channel increment mustn't be 0 in NoteChange() pChn->increment = GetChannelIncrement(pChn, pChn->nPeriod, 0); } int32 setPan = pChn->nPan; pChn->nNewNote = pChn->nLastNote; if(pChn->nNewIns != 0) InstrumentChange(pChn, pChn->nNewIns, porta); NoteChange(pChn, m.note, porta); memory.chnSettings[nChn].incChanged = true; if((m.command == CMD_MODCMDEX || m.command == CMD_S3MCMDEX) && (m.param & 0xF0) == 0xD0 && paramLo < numTicks) { startTick = paramLo; } else if(m.command == CMD_DELAYCUT && paramHi < numTicks) { startTick = paramHi; } if(rowDelay > 1 && startTick != 0 && (GetType() & (MOD_TYPE_S3M | MOD_TYPE_IT | MOD_TYPE_MPT))) { startTick += (playState.m_nMusicSpeed + tickDelay) * (rowDelay - 1); } if(!porta) memory.chnSettings[nChn].ticksToRender = 0; // Panning commands have to be re-applied after a note change with potential pan change. 
if(m.command == CMD_PANNING8 || ((m.command == CMD_MODCMDEX || m.command == CMD_S3MCMDEX) && paramHi == 0x8) || m.volcmd == VOLCMD_PANNING) { pChn->nPan = setPan; } if(m.command == CMD_OFFSET) { bool isExtended = false; SmpLength offset = CalculateXParam(playState.m_nPattern, playState.m_nRow, nChn, &isExtended); if(!isExtended) { offset <<= 8; if(offset == 0) offset = pChn->oldOffset; offset += static_cast<SmpLength>(pChn->nOldHiOffset) << 16; } SampleOffset(*pChn, offset); } else if(m.command == CMD_OFFSETPERCENTAGE) { SampleOffset(*pChn, Util::muldiv_unsigned(pChn->nLength, m.param, 255)); } else if(m.command == CMD_REVERSEOFFSET && pChn->pModSample != nullptr) { memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far ReverseSampleOffset(*pChn, m.param); startTick = playState.m_nMusicSpeed - 1; } else if(m.volcmd == VOLCMD_OFFSET) { if(m.vol <= CountOf(pChn->pModSample->cues) && pChn->pModSample != nullptr) { SmpLength offset; if(m.vol == 0) offset = pChn->oldOffset; else offset = pChn->oldOffset = pChn->pModSample->cues[m.vol - 1]; SampleOffset(*pChn, offset); } } } if(m.note == NOTE_KEYOFF || m.note == NOTE_NOTECUT || (m.note == NOTE_FADE && GetNumInstruments()) || ((m.command == CMD_MODCMDEX || m.command == CMD_S3MCMDEX) && (m.param & 0xF0) == 0xC0 && paramLo < numTicks) || (m.command == CMD_DELAYCUT && paramLo != 0 && startTick + paramLo < numTicks)) { stopNote = true; } if(m.command == CMD_VOLUME) { pChn->nVolume = m.param * 4; } else if(m.volcmd == VOLCMD_VOLUME) { pChn->nVolume = m.vol * 4; } if(pChn->pModSample && !stopNote) { // Check if we don't want to emulate some effect and thus stop processing. if(m.command < MAX_EFFECTS) { if(forbiddenCommands[m.command]) { stopNote = true; } else if(m.command == CMD_MODCMDEX) { // Special case: Slides using extended commands switch(m.param & 0xF0) { case 0x10: case 0x20: stopNote = true; } } } if(m.volcmd < forbiddenVolCommands.size() && forbiddenVolCommands[m.volcmd]) { stopNote = true; } } if(stopNote) { pChn->Stop(); memory.chnSettings[nChn].ticksToRender = 0; } else { if(oldTickDuration != tickDuration && oldTickDuration != 0) { memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far } switch(m.command) { case CMD_TONEPORTAVOL: case CMD_VOLUMESLIDE: case CMD_VIBRATOVOL: if(m.param || (GetType() != MOD_TYPE_MOD)) { for(uint32 i = 0; i < numTicks; i++) { pChn->isFirstTick = (i == 0); VolumeSlide(pChn, m.param); } } break; case CMD_MODCMDEX: if((m.param & 0x0F) || (GetType() & (MOD_TYPE_XM | MOD_TYPE_MT2))) { pChn->isFirstTick = true; switch(m.param & 0xF0) { case 0xA0: FineVolumeUp(pChn, m.param & 0x0F, false); break; case 0xB0: FineVolumeDown(pChn, m.param & 0x0F, false); break; } } break; case CMD_S3MCMDEX: if(m.param == 0x9E) { // Play forward memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far pChn->dwFlags.reset(CHN_PINGPONGFLAG); } else if(m.param == 0x9F) { // Reverse memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far pChn->dwFlags.set(CHN_PINGPONGFLAG); if(!pChn->position.GetInt() && pChn->nLength && (m.IsNote() || !pChn->dwFlags[CHN_LOOP])) { pChn->position.Set(pChn->nLength - 1, SamplePosition::fractMax); } } else if((m.param & 0xF0) == 0x70) { // TODO //ExtendedS3MCommands(nChn, param); } break; } pChn->isFirstTick = true; switch(m.volcmd) { case VOLCMD_FINEVOLUP: FineVolumeUp(pChn, m.vol, m_playBehaviour[kITVolColMemory]); break; case VOLCMD_FINEVOLDOWN: FineVolumeDown(pChn, m.vol, m_playBehaviour[kITVolColMemory]); break; case 
VOLCMD_VOLSLIDEUP: case VOLCMD_VOLSLIDEDOWN: { // IT Compatibility: Volume column volume slides have their own memory // Test case: VolColMemory.it ModCommand::VOL vol = m.vol; if(vol == 0 && m_playBehaviour[kITVolColMemory]) { vol = pChn->nOldVolParam; if(vol == 0) break; } if(m.volcmd == VOLCMD_VOLSLIDEUP) vol <<= 4; for(uint32 i = 0; i < numTicks; i++) { pChn->isFirstTick = (i == 0); VolumeSlide(pChn, vol); } } break; } if(porta) { // Portamento needs immediate syncing, as the pitch changes on each tick uint32 portaTick = memory.chnSettings[nChn].ticksToRender + startTick + 1; memory.chnSettings[nChn].ticksToRender += numTicks; memory.RenderChannel(nChn, tickDuration, portaTick); } else { memory.chnSettings[nChn].ticksToRender += (numTicks - startTick); } } } } oldTickDuration = tickDuration; // Pattern loop is not executed in FT2 if there are any position jump or pattern break commands on the same row. // Pattern loop is not executed in IT if there are any position jump commands on the same row. // Test case for FT2 exception: PatLoop-Jumps.xm, PatLoop-Various.xm // Test case for IT: exception: LoopBreak.it if(patternLoopEndedOnThisRow && (!m_playBehaviour[kFT2PatternLoopWithJumps] || !(positionJumpOnThisRow || patternBreakOnThisRow)) && (!m_playBehaviour[kITPatternLoopWithJumps] || !positionJumpOnThisRow)) { std::map<double, int> startTimes; // This is really just a simple estimation for nested pattern loops. It should handle cases correctly where all parallel loops start and end on the same row. // If one of them starts or ends "in between", it will most likely calculate a wrong duration. // For S3M files, it's also way off. pChn = playState.Chn; for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, pChn++) { ModCommand::COMMAND command = pChn->rowCommand.command; ModCommand::PARAM param = pChn->rowCommand.param; if((command == CMD_S3MCMDEX && param >= 0xB1 && param <= 0xBF) || (command == CMD_MODCMDEX && param >= 0x61 && param <= 0x6F)) { const double start = memory.chnSettings[nChn].patLoop; if(!startTimes[start]) startTimes[start] = 1; startTimes[start] = mpt::lcm(startTimes[start], 1 + (param & 0x0F)); } } for(const auto &i : startTimes) { memory.elapsedTime += (memory.elapsedTime - i.first) * (double)(i.second - 1); for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, pChn++) { if(memory.chnSettings[nChn].patLoop == i.first) { playState.m_lTotalSampleCount += (playState.m_lTotalSampleCount - memory.chnSettings[nChn].patLoopSmp) * (i.second - 1); if(m_playBehaviour[kITPatternLoopTargetReset] || (GetType() == MOD_TYPE_S3M)) { memory.chnSettings[nChn].patLoop = memory.elapsedTime; memory.chnSettings[nChn].patLoopSmp = playState.m_lTotalSampleCount; memory.chnSettings[nChn].patLoopStart = playState.m_nRow + 1; } break; } } } if(GetType() == MOD_TYPE_IT) { // IT pattern loop start row update - at the end of a pattern loop, set pattern loop start to next row (for upcoming pattern loops with missing SB0) for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++) { if((pChn->rowCommand.command == CMD_S3MCMDEX && pChn->rowCommand.param >= 0xB1 && pChn->rowCommand.param <= 0xBF)) { memory.chnSettings[nChn].patLoop = memory.elapsedTime; memory.chnSettings[nChn].patLoopSmp = playState.m_lTotalSampleCount; } } } } } // Now advance the sample positions for sample seeking on channels that are still playing if(adjustSamplePos) { for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++) { if(memory.chnSettings[nChn].ticksToRender != GetLengthMemory::IGNORE_CHANNEL) { 
memory.RenderChannel(nChn, oldTickDuration); } } } if(retval.targetReached || target.mode == GetLengthTarget::NoTarget) { retval.lastOrder = playState.m_nCurrentOrder; retval.lastRow = playState.m_nRow; } retval.duration = memory.elapsedTime; results.push_back(retval); // Store final variables if(adjustMode & eAdjust) { if(retval.targetReached || target.mode == GetLengthTarget::NoTarget) { // Target found, or there is no target (i.e. play whole song)... m_PlayState = std::move(playState); m_PlayState.m_nNextRow = m_PlayState.m_nRow; m_PlayState.m_nFrameDelay = m_PlayState.m_nPatternDelay = 0; m_PlayState.m_nTickCount = Util::MaxValueOfType(m_PlayState.m_nTickCount) - 1; m_PlayState.m_bPositionChanged = true; for(CHANNELINDEX n = 0; n < GetNumChannels(); n++) { if(m_PlayState.Chn[n].nLastNote != NOTE_NONE) { m_PlayState.Chn[n].nNewNote = m_PlayState.Chn[n].nLastNote; } if(memory.chnSettings[n].vol != 0xFF && !adjustSamplePos) { m_PlayState.Chn[n].nVolume = std::min(memory.chnSettings[n].vol, uint8(64)) * 4; } } #ifndef NO_PLUGINS // If there were any PC events, update plugin parameters to their latest value. std::bitset<MAX_MIXPLUGINS> plugSetProgram; for(const auto &param : memory.plugParams) { PLUGINDEX plug = param.first.first - 1; IMixPlugin *plugin = m_MixPlugins[plug].pMixPlugin; if(plugin != nullptr) { if(!plugSetProgram[plug]) { // Used for bridged plugins to avoid sending out individual messages for each parameter. plugSetProgram.set(plug); plugin->BeginSetProgram(); } plugin->SetParameter(param.first.second, param.second / PlugParamValue(ModCommand::maxColumnValue)); } } if(plugSetProgram.any()) { for(PLUGINDEX i = 0; i < MAX_MIXPLUGINS; i++) { if(plugSetProgram[i]) { m_MixPlugins[i].pMixPlugin->EndSetProgram(); } } } #endif // NO_PLUGINS } else if(adjustMode != eAdjustOnSuccess) { // Target not found (e.g. when jumping to a hidden sub song), reset global variables... m_PlayState.m_nMusicSpeed = m_nDefaultSpeed; m_PlayState.m_nMusicTempo = m_nDefaultTempo; m_PlayState.m_nGlobalVolume = m_nDefaultGlobalVolume; } // When adjusting the playback status, we will also want to update the visited rows vector according to the current position. if(sequence != Order.GetCurrentSequenceIndex()) { Order.SetSequence(sequence); } visitedSongRows.Set(visitedRows); } return results; }
std::vector<GetLengthType> CSoundFile::GetLength(enmGetLengthResetMode adjustMode, GetLengthTarget target) { std::vector<GetLengthType> results; GetLengthType retval; retval.startOrder = target.startOrder; retval.startRow = target.startRow; // Are we trying to reach a certain pattern position? const bool hasSearchTarget = target.mode != GetLengthTarget::NoTarget; const bool adjustSamplePos = (adjustMode & eAdjustSamplePositions) == eAdjustSamplePositions; SEQUENCEINDEX sequence = target.sequence; if(sequence >= Order.GetNumSequences()) sequence = Order.GetCurrentSequenceIndex(); const ModSequence &orderList = Order(sequence); GetLengthMemory memory(*this); CSoundFile::PlayState &playState = *memory.state; // Temporary visited rows vector (so that GetLength() won't interfere with the player code if the module is playing at the same time) RowVisitor visitedRows(*this, sequence); playState.m_nNextRow = playState.m_nRow = target.startRow; playState.m_nNextOrder = playState.m_nCurrentOrder = target.startOrder; // Fast LUTs for commands that are too weird / complicated / whatever to emulate in sample position adjust mode. std::bitset<MAX_EFFECTS> forbiddenCommands; std::bitset<MAX_VOLCMDS> forbiddenVolCommands; if(adjustSamplePos) { forbiddenCommands.set(CMD_ARPEGGIO); forbiddenCommands.set(CMD_PORTAMENTOUP); forbiddenCommands.set(CMD_PORTAMENTODOWN); forbiddenCommands.set(CMD_XFINEPORTAUPDOWN); forbiddenCommands.set(CMD_NOTESLIDEUP); forbiddenCommands.set(CMD_NOTESLIDEUPRETRIG); forbiddenCommands.set(CMD_NOTESLIDEDOWN); forbiddenCommands.set(CMD_NOTESLIDEDOWNRETRIG); forbiddenVolCommands.set(VOLCMD_PORTAUP); forbiddenVolCommands.set(VOLCMD_PORTADOWN); // Optimize away channels for which it's pointless to adjust sample positions for(CHANNELINDEX i = 0; i < GetNumChannels(); i++) { if(ChnSettings[i].dwFlags[CHN_MUTE]) memory.chnSettings[i].ticksToRender = GetLengthMemory::IGNORE_CHANNEL; } if(target.mode == GetLengthTarget::SeekPosition && target.pos.order < orderList.size()) { // If we know where to seek, we can directly rule out any channels on which a new note would be triggered right at the start. const PATTERNINDEX seekPat = orderList[target.pos.order]; if(Patterns.IsValidPat(seekPat) && Patterns[seekPat].IsValidRow(target.pos.row)) { const ModCommand *m = Patterns[seekPat].GetRow(target.pos.row); for(CHANNELINDEX i = 0; i < GetNumChannels(); i++, m++) { if(m->note == NOTE_NOTECUT || m->note == NOTE_KEYOFF || (m->note == NOTE_FADE && GetNumInstruments()) || (m->IsNote() && !m->IsPortamento())) { memory.chnSettings[i].ticksToRender = GetLengthMemory::IGNORE_CHANNEL; } } } } } // If samples are being synced, force them to resync if tick duration changes uint32 oldTickDuration = 0; for (;;) { // Time target reached. if(target.mode == GetLengthTarget::SeekSeconds && memory.elapsedTime >= target.time) { retval.targetReached = true; break; } uint32 rowDelay = 0, tickDelay = 0; playState.m_nRow = playState.m_nNextRow; playState.m_nCurrentOrder = playState.m_nNextOrder; if(orderList.IsValidPat(playState.m_nCurrentOrder) && playState.m_nRow >= Patterns[orderList[playState.m_nCurrentOrder]].GetNumRows()) { playState.m_nRow = 0; if(m_playBehaviour[kFT2LoopE60Restart]) { playState.m_nRow = playState.m_nNextPatStartRow; playState.m_nNextPatStartRow = 0; } playState.m_nCurrentOrder = ++playState.m_nNextOrder; } // Check if pattern is valid playState.m_nPattern = playState.m_nCurrentOrder < orderList.size() ? 
orderList[playState.m_nCurrentOrder] : orderList.GetInvalidPatIndex(); bool positionJumpOnThisRow = false; bool patternBreakOnThisRow = false; bool patternLoopEndedOnThisRow = false, patternLoopStartedOnThisRow = false; if(!Patterns.IsValidPat(playState.m_nPattern) && playState.m_nPattern != orderList.GetInvalidPatIndex() && target.mode == GetLengthTarget::SeekPosition && playState.m_nCurrentOrder == target.pos.order) { // Early test: Target is inside +++ or non-existing pattern retval.targetReached = true; break; } while(playState.m_nPattern >= Patterns.Size()) { // End of song? if((playState.m_nPattern == orderList.GetInvalidPatIndex()) || (playState.m_nCurrentOrder >= orderList.size())) { if(playState.m_nCurrentOrder == orderList.GetRestartPos()) break; else playState.m_nCurrentOrder = orderList.GetRestartPos(); } else { playState.m_nCurrentOrder++; } playState.m_nPattern = (playState.m_nCurrentOrder < orderList.size()) ? orderList[playState.m_nCurrentOrder] : orderList.GetInvalidPatIndex(); playState.m_nNextOrder = playState.m_nCurrentOrder; if((!Patterns.IsValidPat(playState.m_nPattern)) && visitedRows.IsVisited(playState.m_nCurrentOrder, 0, true)) { if(!hasSearchTarget || !visitedRows.GetFirstUnvisitedRow(playState.m_nNextOrder, playState.m_nRow, true)) { // We aren't searching for a specific row, or we couldn't find any more unvisited rows. break; } else { // We haven't found the target row yet, but we found some other unplayed row... continue searching from here. retval.duration = memory.elapsedTime; results.push_back(retval); retval.startRow = playState.m_nRow; retval.startOrder = playState.m_nNextOrder; memory.Reset(); playState.m_nCurrentOrder = playState.m_nNextOrder; playState.m_nPattern = orderList[playState.m_nCurrentOrder]; playState.m_nNextRow = playState.m_nRow; break; } } } if(playState.m_nNextOrder == ORDERINDEX_INVALID) { // GetFirstUnvisitedRow failed, so there is nothing more to play break; } // Skip non-existing patterns if(!Patterns.IsValidPat(playState.m_nPattern)) { // If there isn't even a tune, we should probably stop here. if(playState.m_nCurrentOrder == orderList.GetRestartPos()) { if(!hasSearchTarget || !visitedRows.GetFirstUnvisitedRow(playState.m_nNextOrder, playState.m_nRow, true)) { // We aren't searching for a specific row, or we couldn't find any more unvisited rows. break; } else { // We haven't found the target row yet, but we found some other unplayed row... continue searching from here. retval.duration = memory.elapsedTime; results.push_back(retval); retval.startRow = playState.m_nRow; retval.startOrder = playState.m_nNextOrder; memory.Reset(); playState.m_nNextRow = playState.m_nRow; continue; } } playState.m_nNextOrder = playState.m_nCurrentOrder + 1; continue; } // Should never happen if(playState.m_nRow >= Patterns[playState.m_nPattern].GetNumRows()) playState.m_nRow = 0; // Check whether target was reached. if(target.mode == GetLengthTarget::SeekPosition && playState.m_nCurrentOrder == target.pos.order && playState.m_nRow == target.pos.row) { retval.targetReached = true; break; } if(visitedRows.IsVisited(playState.m_nCurrentOrder, playState.m_nRow, true)) { if(!hasSearchTarget || !visitedRows.GetFirstUnvisitedRow(playState.m_nNextOrder, playState.m_nRow, true)) { // We aren't searching for a specific row, or we couldn't find any more unvisited rows. break; } else { // We haven't found the target row yet, but we found some other unplayed row... continue searching from here. 
retval.duration = memory.elapsedTime; results.push_back(retval); retval.startRow = playState.m_nRow; retval.startOrder = playState.m_nNextOrder; memory.Reset(); playState.m_nNextRow = playState.m_nRow; continue; } } retval.endOrder = playState.m_nCurrentOrder; retval.endRow = playState.m_nRow; // Update next position playState.m_nNextRow = playState.m_nRow + 1; // Jumped to invalid pattern row? if(playState.m_nRow >= Patterns[playState.m_nPattern].GetNumRows()) { playState.m_nRow = 0; } // New pattern? if(!playState.m_nRow) { for(CHANNELINDEX chn = 0; chn < GetNumChannels(); chn++) { memory.chnSettings[chn].patLoop = memory.elapsedTime; memory.chnSettings[chn].patLoopSmp = playState.m_lTotalSampleCount; } } ModChannel *pChn = playState.Chn; // For various effects, we need to know first how many ticks there are in this row. const ModCommand *p = Patterns[playState.m_nPattern].GetpModCommand(playState.m_nRow, 0); for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, p++) { if(m_playBehaviour[kST3NoMutedChannels] && ChnSettings[nChn].dwFlags[CHN_MUTE]) // not even effects are processed on muted S3M channels continue; if(p->IsPcNote()) { #ifndef NO_PLUGINS if((adjustMode & eAdjust) && p->instr > 0 && p->instr <= MAX_MIXPLUGINS) { memory.plugParams[std::make_pair(p->instr, p->GetValueVolCol())] = p->GetValueEffectCol(); } #endif // NO_PLUGINS pChn[nChn].rowCommand.Clear(); continue; } pChn[nChn].rowCommand = *p; switch(p->command) { case CMD_SPEED: SetSpeed(playState, p->param); break; case CMD_TEMPO: if(m_playBehaviour[kMODVBlankTiming]) { // ProTracker MODs with VBlank timing: All Fxx parameters set the tick count. if(p->param != 0) SetSpeed(playState, p->param); } break; case CMD_S3MCMDEX: if((p->param & 0xF0) == 0x60) { // Fine Pattern Delay tickDelay += (p->param & 0x0F); } else if((p->param & 0xF0) == 0xE0 && !rowDelay) { // Pattern Delay if(!(GetType() & MOD_TYPE_S3M) || (p->param & 0x0F) != 0) { // While Impulse Tracker *does* count S60 as a valid row delay (and thus ignores any other row delay commands on the right), // Scream Tracker 3 simply ignores such commands. 
rowDelay = 1 + (p->param & 0x0F); } } break; case CMD_MODCMDEX: if((p->param & 0xF0) == 0xE0) { // Pattern Delay rowDelay = 1 + (p->param & 0x0F); } break; } } if(rowDelay == 0) rowDelay = 1; const uint32 numTicks = (playState.m_nMusicSpeed + tickDelay) * rowDelay; const uint32 nonRowTicks = numTicks - rowDelay; for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); pChn++, nChn++) if(!pChn->rowCommand.IsEmpty()) { if(m_playBehaviour[kST3NoMutedChannels] && ChnSettings[nChn].dwFlags[CHN_MUTE]) // not even effects are processed on muted S3M channels continue; ModCommand::COMMAND command = pChn->rowCommand.command; ModCommand::PARAM param = pChn->rowCommand.param; ModCommand::NOTE note = pChn->rowCommand.note; if (pChn->rowCommand.instr) { pChn->nNewIns = pChn->rowCommand.instr; pChn->nLastNote = NOTE_NONE; memory.chnSettings[nChn].vol = 0xFF; } if (pChn->rowCommand.IsNote()) pChn->nLastNote = note; // Update channel panning if(pChn->rowCommand.IsNote() || pChn->rowCommand.instr) { SAMPLEINDEX smp = 0; if(GetNumInstruments()) { ModInstrument *pIns; if(pChn->nNewIns <= GetNumInstruments() && (pIns = Instruments[pChn->nNewIns]) != nullptr) { if(pIns->dwFlags[INS_SETPANNING]) pChn->nPan = pIns->nPan; if(ModCommand::IsNote(note)) smp = pIns->Keyboard[note - NOTE_MIN]; } } else { smp = pChn->nNewIns; } if(smp > 0 && smp <= GetNumSamples() && Samples[smp].uFlags[CHN_PANNING]) { pChn->nPan = Samples[smp].nPan; } } switch(pChn->rowCommand.volcmd) { case VOLCMD_VOLUME: memory.chnSettings[nChn].vol = pChn->rowCommand.vol; break; case VOLCMD_VOLSLIDEUP: case VOLCMD_VOLSLIDEDOWN: if(pChn->rowCommand.vol != 0) pChn->nOldVolParam = pChn->rowCommand.vol; break; } switch(command) { // Position Jump case CMD_POSITIONJUMP: positionJumpOnThisRow = true; playState.m_nNextOrder = static_cast<ORDERINDEX>(CalculateXParam(playState.m_nPattern, playState.m_nRow, nChn)); playState.m_nNextPatStartRow = 0; // FT2 E60 bug // see https://forum.openmpt.org/index.php?topic=2769.0 - FastTracker resets Dxx if Bxx is called _after_ Dxx // Test case: PatternJump.mod if(!patternBreakOnThisRow || (GetType() & (MOD_TYPE_MOD | MOD_TYPE_XM))) playState.m_nNextRow = 0; if (adjustMode & eAdjust) { pChn->nPatternLoopCount = 0; pChn->nPatternLoop = 0; } break; // Pattern Break case CMD_PATTERNBREAK: { ROWINDEX row = PatternBreak(playState, nChn, param); if(row != ROWINDEX_INVALID) { patternBreakOnThisRow = true; playState.m_nNextRow = row; if(!positionJumpOnThisRow) { playState.m_nNextOrder = playState.m_nCurrentOrder + 1; } if(adjustMode & eAdjust) { pChn->nPatternLoopCount = 0; pChn->nPatternLoop = 0; } } } break; // Set Tempo case CMD_TEMPO: if(!m_playBehaviour[kMODVBlankTiming]) { TEMPO tempo(CalculateXParam(playState.m_nPattern, playState.m_nRow, nChn), 0); if ((adjustMode & eAdjust) && (GetType() & (MOD_TYPE_S3M | MOD_TYPE_IT | MOD_TYPE_MPT))) { if (tempo.GetInt()) pChn->nOldTempo = static_cast<uint8>(tempo.GetInt()); else tempo.Set(pChn->nOldTempo); } if (tempo.GetInt() >= 0x20) playState.m_nMusicTempo = tempo; else { // Tempo Slide TEMPO tempoDiff((tempo.GetInt() & 0x0F) * nonRowTicks, 0); if ((tempo.GetInt() & 0xF0) == 0x10) { playState.m_nMusicTempo += tempoDiff; } else { if(tempoDiff < playState.m_nMusicTempo) playState.m_nMusicTempo -= tempoDiff; else playState.m_nMusicTempo.Set(0); } } TEMPO tempoMin = GetModSpecifications().GetTempoMin(), tempoMax = GetModSpecifications().GetTempoMax(); if(m_playBehaviour[kTempoClamp]) // clamp tempo correctly in compatible mode { tempoMax.Set(255); } Limit(playState.m_nMusicTempo, tempoMin, 
tempoMax); } break; case CMD_S3MCMDEX: switch(param & 0xF0) { case 0x90: if(param <= 0x91) { pChn->dwFlags.set(CHN_SURROUND, param == 0x91); } break; case 0xA0: // High sample offset pChn->nOldHiOffset = param & 0x0F; break; case 0xB0: // Pattern Loop if (param & 0x0F) { patternLoopEndedOnThisRow = true; } else { CHANNELINDEX firstChn = nChn, lastChn = nChn; if(GetType() == MOD_TYPE_S3M) { // ST3 has only one global loop memory. firstChn = 0; lastChn = GetNumChannels() - 1; } for(CHANNELINDEX c = firstChn; c <= lastChn; c++) { memory.chnSettings[c].patLoop = memory.elapsedTime; memory.chnSettings[c].patLoopSmp = playState.m_lTotalSampleCount; memory.chnSettings[c].patLoopStart = playState.m_nRow; } patternLoopStartedOnThisRow = true; } break; case 0xF0: // Active macro pChn->nActiveMacro = param & 0x0F; break; } break; case CMD_MODCMDEX: switch(param & 0xF0) { case 0x60: // Pattern Loop if (param & 0x0F) { playState.m_nNextPatStartRow = memory.chnSettings[nChn].patLoopStart; // FT2 E60 bug patternLoopEndedOnThisRow = true; } else { patternLoopStartedOnThisRow = true; memory.chnSettings[nChn].patLoop = memory.elapsedTime; memory.chnSettings[nChn].patLoopSmp = playState.m_lTotalSampleCount; memory.chnSettings[nChn].patLoopStart = playState.m_nRow; } break; case 0xF0: // Active macro pChn->nActiveMacro = param & 0x0F; break; } break; case CMD_XFINEPORTAUPDOWN: // ignore high offset in compatible mode if(((param & 0xF0) == 0xA0) && !m_playBehaviour[kFT2RestrictXCommand]) pChn->nOldHiOffset = param & 0x0F; break; } // The following calculations are not interesting if we just want to get the song length. if (!(adjustMode & eAdjust)) continue; switch(command) { // Portamento Up/Down case CMD_PORTAMENTOUP: if(param) { // FT2 compatibility: Separate effect memory for all portamento commands // Test case: Porta-LinkMem.xm if(!m_playBehaviour[kFT2PortaUpDownMemory]) pChn->nOldPortaDown = param; pChn->nOldPortaUp = param; } break; case CMD_PORTAMENTODOWN: if(param) { // FT2 compatibility: Separate effect memory for all portamento commands // Test case: Porta-LinkMem.xm if(!m_playBehaviour[kFT2PortaUpDownMemory]) pChn->nOldPortaUp = param; pChn->nOldPortaDown = param; } break; // Tone-Portamento case CMD_TONEPORTAMENTO: if (param) pChn->nPortamentoSlide = param << 2; break; // Offset case CMD_OFFSET: if (param) pChn->oldOffset = param << 8; break; // Volume Slide case CMD_VOLUMESLIDE: case CMD_TONEPORTAVOL: if (param) pChn->nOldVolumeSlide = param; break; // Set Volume case CMD_VOLUME: memory.chnSettings[nChn].vol = param; break; // Global Volume case CMD_GLOBALVOLUME: if(!(GetType() & GLOBALVOL_7BIT_FORMATS) && param < 128) param *= 2; // IT compatibility 16. ST3 and IT ignore out-of-range values if(param <= 128) { playState.m_nGlobalVolume = param * 2; } else if(!(GetType() & (MOD_TYPE_IT | MOD_TYPE_MPT | MOD_TYPE_S3M))) { playState.m_nGlobalVolume = 256; } break; // Global Volume Slide case CMD_GLOBALVOLSLIDE: if(m_playBehaviour[kPerChannelGlobalVolSlide]) { // IT compatibility 16. 
Global volume slide params are stored per channel (FT2/IT) if (param) pChn->nOldGlobalVolSlide = param; else param = pChn->nOldGlobalVolSlide; } else { if (param) playState.Chn[0].nOldGlobalVolSlide = param; else param = playState.Chn[0].nOldGlobalVolSlide; } if (((param & 0x0F) == 0x0F) && (param & 0xF0)) { param >>= 4; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume += param << 1; } else if (((param & 0xF0) == 0xF0) && (param & 0x0F)) { param = (param & 0x0F) << 1; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume -= param; } else if (param & 0xF0) { param >>= 4; param <<= 1; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume += param * nonRowTicks; } else { param = (param & 0x0F) << 1; if (!(GetType() & GLOBALVOL_7BIT_FORMATS)) param <<= 1; playState.m_nGlobalVolume -= param * nonRowTicks; } Limit(playState.m_nGlobalVolume, 0, 256); break; case CMD_CHANNELVOLUME: if (param <= 64) pChn->nGlobalVol = param; break; case CMD_CHANNELVOLSLIDE: { if (param) pChn->nOldChnVolSlide = param; else param = pChn->nOldChnVolSlide; int32 volume = pChn->nGlobalVol; if((param & 0x0F) == 0x0F && (param & 0xF0)) volume += (param >> 4); // Fine Up else if((param & 0xF0) == 0xF0 && (param & 0x0F)) volume -= (param & 0x0F); // Fine Down else if(param & 0x0F) // Down volume -= (param & 0x0F) * nonRowTicks; else // Up volume += ((param & 0xF0) >> 4) * nonRowTicks; Limit(volume, 0, 64); pChn->nGlobalVol = volume; } break; case CMD_PANNING8: Panning(pChn, param, Pan8bit); break; case CMD_MODCMDEX: if(param < 0x10) { // LED filter for(CHANNELINDEX chn = 0; chn < GetNumChannels(); chn++) { playState.Chn[chn].dwFlags.set(CHN_AMIGAFILTER, !(param & 1)); } } MPT_FALLTHROUGH; case CMD_S3MCMDEX: if((param & 0xF0) == 0x80) { Panning(pChn, (param & 0x0F), Pan4bit); } break; case CMD_VIBRATOVOL: if (param) pChn->nOldVolumeSlide = param; param = 0; MPT_FALLTHROUGH; case CMD_VIBRATO: Vibrato(pChn, param); break; case CMD_FINEVIBRATO: FineVibrato(pChn, param); break; case CMD_TREMOLO: Tremolo(pChn, param); break; case CMD_PANBRELLO: Panbrello(pChn, param); break; } switch(pChn->rowCommand.volcmd) { case VOLCMD_PANNING: Panning(pChn, pChn->rowCommand.vol, Pan6bit); break; case VOLCMD_VIBRATOSPEED: // FT2 does not automatically enable vibrato with the "set vibrato speed" command if(m_playBehaviour[kFT2VolColVibrato]) pChn->nVibratoSpeed = pChn->rowCommand.vol & 0x0F; else Vibrato(pChn, pChn->rowCommand.vol << 4); break; case VOLCMD_VIBRATODEPTH: Vibrato(pChn, pChn->rowCommand.vol); break; } // Process vibrato / tremolo / panbrello switch(pChn->rowCommand.command) { case CMD_VIBRATO: case CMD_FINEVIBRATO: case CMD_VIBRATOVOL: if(adjustMode & eAdjust) { uint32 vibTicks = ((GetType() & (MOD_TYPE_IT | MOD_TYPE_MPT)) && !m_SongFlags[SONG_ITOLDEFFECTS]) ? numTicks : nonRowTicks; uint32 inc = pChn->nVibratoSpeed * vibTicks; if(m_playBehaviour[kITVibratoTremoloPanbrello]) inc *= 4; pChn->nVibratoPos += static_cast<uint8>(inc); } break; case CMD_TREMOLO: if(adjustMode & eAdjust) { uint32 tremTicks = ((GetType() & (MOD_TYPE_IT | MOD_TYPE_MPT)) && !m_SongFlags[SONG_ITOLDEFFECTS]) ? 
numTicks : nonRowTicks; uint32 inc = pChn->nTremoloSpeed * tremTicks; if(m_playBehaviour[kITVibratoTremoloPanbrello]) inc *= 4; pChn->nTremoloPos += static_cast<uint8>(inc); } break; case CMD_PANBRELLO: if(adjustMode & eAdjust) { // Panbrello effect is permanent in compatible mode, so actually apply panbrello for the last tick of this row pChn->nPanbrelloPos += static_cast<uint8>(pChn->nPanbrelloSpeed * (numTicks - 1)); ProcessPanbrello(pChn); } break; } } // Interpret F00 effect in XM files as "stop song" if(GetType() == MOD_TYPE_XM && playState.m_nMusicSpeed == uint16_max) { break; } playState.m_nCurrentRowsPerBeat = m_nDefaultRowsPerBeat; if(Patterns[playState.m_nPattern].GetOverrideSignature()) { playState.m_nCurrentRowsPerBeat = Patterns[playState.m_nPattern].GetRowsPerBeat(); } const uint32 tickDuration = GetTickDuration(playState); const uint32 rowDuration = tickDuration * numTicks; memory.elapsedTime += static_cast<double>(rowDuration) / static_cast<double>(m_MixerSettings.gdwMixingFreq); playState.m_lTotalSampleCount += rowDuration; if(adjustSamplePos) { // Super experimental and dirty sample seeking pChn = playState.Chn; for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); pChn++, nChn++) { if(memory.chnSettings[nChn].ticksToRender == GetLengthMemory::IGNORE_CHANNEL) continue; uint32 startTick = 0; const ModCommand &m = pChn->rowCommand; uint32 paramHi = m.param >> 4, paramLo = m.param & 0x0F; bool porta = m.command == CMD_TONEPORTAMENTO || m.command == CMD_TONEPORTAVOL || m.volcmd == VOLCMD_TONEPORTAMENTO; bool stopNote = patternLoopStartedOnThisRow; // It's too much trouble to keep those pattern loops in sync... if(m.instr) pChn->proTrackerOffset = 0; if(m.IsNote()) { if(porta && memory.chnSettings[nChn].incChanged) { // If there's a portamento, the current channel increment mustn't be 0 in NoteChange() pChn->increment = GetChannelIncrement(pChn, pChn->nPeriod, 0); } int32 setPan = pChn->nPan; pChn->nNewNote = pChn->nLastNote; if(pChn->nNewIns != 0) InstrumentChange(pChn, pChn->nNewIns, porta); NoteChange(pChn, m.note, porta); memory.chnSettings[nChn].incChanged = true; if((m.command == CMD_MODCMDEX || m.command == CMD_S3MCMDEX) && (m.param & 0xF0) == 0xD0 && paramLo < numTicks) { startTick = paramLo; } else if(m.command == CMD_DELAYCUT && paramHi < numTicks) { startTick = paramHi; } if(rowDelay > 1 && startTick != 0 && (GetType() & (MOD_TYPE_S3M | MOD_TYPE_IT | MOD_TYPE_MPT))) { startTick += (playState.m_nMusicSpeed + tickDelay) * (rowDelay - 1); } if(!porta) memory.chnSettings[nChn].ticksToRender = 0; // Panning commands have to be re-applied after a note change with potential pan change. 
if(m.command == CMD_PANNING8 || ((m.command == CMD_MODCMDEX || m.command == CMD_S3MCMDEX) && paramHi == 0x8) || m.volcmd == VOLCMD_PANNING) { pChn->nPan = setPan; } if(m.command == CMD_OFFSET) { bool isExtended = false; SmpLength offset = CalculateXParam(playState.m_nPattern, playState.m_nRow, nChn, &isExtended); if(!isExtended) { offset <<= 8; if(offset == 0) offset = pChn->oldOffset; offset += static_cast<SmpLength>(pChn->nOldHiOffset) << 16; } SampleOffset(*pChn, offset); } else if(m.command == CMD_OFFSETPERCENTAGE) { SampleOffset(*pChn, Util::muldiv_unsigned(pChn->nLength, m.param, 255)); } else if(m.command == CMD_REVERSEOFFSET && pChn->pModSample != nullptr) { memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far ReverseSampleOffset(*pChn, m.param); startTick = playState.m_nMusicSpeed - 1; } else if(m.volcmd == VOLCMD_OFFSET) { if(m.vol <= CountOf(pChn->pModSample->cues) && pChn->pModSample != nullptr) { SmpLength offset; if(m.vol == 0) offset = pChn->oldOffset; else offset = pChn->oldOffset = pChn->pModSample->cues[m.vol - 1]; SampleOffset(*pChn, offset); } } } if(m.note == NOTE_KEYOFF || m.note == NOTE_NOTECUT || (m.note == NOTE_FADE && GetNumInstruments()) || ((m.command == CMD_MODCMDEX || m.command == CMD_S3MCMDEX) && (m.param & 0xF0) == 0xC0 && paramLo < numTicks) || (m.command == CMD_DELAYCUT && paramLo != 0 && startTick + paramLo < numTicks)) { stopNote = true; } if(m.command == CMD_VOLUME) { pChn->nVolume = m.param * 4; } else if(m.volcmd == VOLCMD_VOLUME) { pChn->nVolume = m.vol * 4; } if(pChn->pModSample && !stopNote) { // Check if we don't want to emulate some effect and thus stop processing. if(m.command < MAX_EFFECTS) { if(forbiddenCommands[m.command]) { stopNote = true; } else if(m.command == CMD_MODCMDEX) { // Special case: Slides using extended commands switch(m.param & 0xF0) { case 0x10: case 0x20: stopNote = true; } } } if(m.volcmd < forbiddenVolCommands.size() && forbiddenVolCommands[m.volcmd]) { stopNote = true; } } if(stopNote) { pChn->Stop(); memory.chnSettings[nChn].ticksToRender = 0; } else { if(oldTickDuration != tickDuration && oldTickDuration != 0) { memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far } switch(m.command) { case CMD_TONEPORTAVOL: case CMD_VOLUMESLIDE: case CMD_VIBRATOVOL: if(m.param || (GetType() != MOD_TYPE_MOD)) { for(uint32 i = 0; i < numTicks; i++) { pChn->isFirstTick = (i == 0); VolumeSlide(pChn, m.param); } } break; case CMD_MODCMDEX: if((m.param & 0x0F) || (GetType() & (MOD_TYPE_XM | MOD_TYPE_MT2))) { pChn->isFirstTick = true; switch(m.param & 0xF0) { case 0xA0: FineVolumeUp(pChn, m.param & 0x0F, false); break; case 0xB0: FineVolumeDown(pChn, m.param & 0x0F, false); break; } } break; case CMD_S3MCMDEX: if(m.param == 0x9E) { // Play forward memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far pChn->dwFlags.reset(CHN_PINGPONGFLAG); } else if(m.param == 0x9F) { // Reverse memory.RenderChannel(nChn, oldTickDuration); // Re-sync what we've got so far pChn->dwFlags.set(CHN_PINGPONGFLAG); if(!pChn->position.GetInt() && pChn->nLength && (m.IsNote() || !pChn->dwFlags[CHN_LOOP])) { pChn->position.Set(pChn->nLength - 1, SamplePosition::fractMax); } } else if((m.param & 0xF0) == 0x70) { // TODO //ExtendedS3MCommands(nChn, param); } break; } pChn->isFirstTick = true; switch(m.volcmd) { case VOLCMD_FINEVOLUP: FineVolumeUp(pChn, m.vol, m_playBehaviour[kITVolColMemory]); break; case VOLCMD_FINEVOLDOWN: FineVolumeDown(pChn, m.vol, m_playBehaviour[kITVolColMemory]); break; case 
VOLCMD_VOLSLIDEUP: case VOLCMD_VOLSLIDEDOWN: { // IT Compatibility: Volume column volume slides have their own memory // Test case: VolColMemory.it ModCommand::VOL vol = m.vol; if(vol == 0 && m_playBehaviour[kITVolColMemory]) { vol = pChn->nOldVolParam; if(vol == 0) break; } if(m.volcmd == VOLCMD_VOLSLIDEUP) vol <<= 4; for(uint32 i = 0; i < numTicks; i++) { pChn->isFirstTick = (i == 0); VolumeSlide(pChn, vol); } } break; } if(porta) { // Portamento needs immediate syncing, as the pitch changes on each tick uint32 portaTick = memory.chnSettings[nChn].ticksToRender + startTick + 1; memory.chnSettings[nChn].ticksToRender += numTicks; memory.RenderChannel(nChn, tickDuration, portaTick); } else { memory.chnSettings[nChn].ticksToRender += (numTicks - startTick); } } } } oldTickDuration = tickDuration; // Pattern loop is not executed in FT2 if there are any position jump or pattern break commands on the same row. // Pattern loop is not executed in IT if there are any position jump commands on the same row. // Test case for FT2 exception: PatLoop-Jumps.xm, PatLoop-Various.xm // Test case for IT: exception: LoopBreak.it if(patternLoopEndedOnThisRow && (!m_playBehaviour[kFT2PatternLoopWithJumps] || !(positionJumpOnThisRow || patternBreakOnThisRow)) && (!m_playBehaviour[kITPatternLoopWithJumps] || !positionJumpOnThisRow)) { std::map<double, int> startTimes; // This is really just a simple estimation for nested pattern loops. It should handle cases correctly where all parallel loops start and end on the same row. // If one of them starts or ends "in between", it will most likely calculate a wrong duration. // For S3M files, it's also way off. pChn = playState.Chn; for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, pChn++) { ModCommand::COMMAND command = pChn->rowCommand.command; ModCommand::PARAM param = pChn->rowCommand.param; if((command == CMD_S3MCMDEX && param >= 0xB1 && param <= 0xBF) || (command == CMD_MODCMDEX && param >= 0x61 && param <= 0x6F)) { const double start = memory.chnSettings[nChn].patLoop; if(!startTimes[start]) startTimes[start] = 1; startTimes[start] = mpt::lcm(startTimes[start], 1 + (param & 0x0F)); } } for(const auto &i : startTimes) { memory.elapsedTime += (memory.elapsedTime - i.first) * (double)(i.second - 1); for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, pChn++) { if(memory.chnSettings[nChn].patLoop == i.first) { playState.m_lTotalSampleCount += (playState.m_lTotalSampleCount - memory.chnSettings[nChn].patLoopSmp) * (i.second - 1); if(m_playBehaviour[kITPatternLoopTargetReset] || (GetType() == MOD_TYPE_S3M)) { memory.chnSettings[nChn].patLoop = memory.elapsedTime; memory.chnSettings[nChn].patLoopSmp = playState.m_lTotalSampleCount; memory.chnSettings[nChn].patLoopStart = playState.m_nRow + 1; } break; } } } if(GetType() == MOD_TYPE_IT) { // IT pattern loop start row update - at the end of a pattern loop, set pattern loop start to next row (for upcoming pattern loops with missing SB0) pChn = playState.Chn; for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, pChn++) { if((pChn->rowCommand.command == CMD_S3MCMDEX && pChn->rowCommand.param >= 0xB1 && pChn->rowCommand.param <= 0xBF)) { memory.chnSettings[nChn].patLoop = memory.elapsedTime; memory.chnSettings[nChn].patLoopSmp = playState.m_lTotalSampleCount; } } } } } // Now advance the sample positions for sample seeking on channels that are still playing if(adjustSamplePos) { for(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++) { if(memory.chnSettings[nChn].ticksToRender != 
GetLengthMemory::IGNORE_CHANNEL) { memory.RenderChannel(nChn, oldTickDuration); } } } if(retval.targetReached || target.mode == GetLengthTarget::NoTarget) { retval.lastOrder = playState.m_nCurrentOrder; retval.lastRow = playState.m_nRow; } retval.duration = memory.elapsedTime; results.push_back(retval); // Store final variables if(adjustMode & eAdjust) { if(retval.targetReached || target.mode == GetLengthTarget::NoTarget) { // Target found, or there is no target (i.e. play whole song)... m_PlayState = std::move(playState); m_PlayState.m_nNextRow = m_PlayState.m_nRow; m_PlayState.m_nFrameDelay = m_PlayState.m_nPatternDelay = 0; m_PlayState.m_nTickCount = Util::MaxValueOfType(m_PlayState.m_nTickCount) - 1; m_PlayState.m_bPositionChanged = true; for(CHANNELINDEX n = 0; n < GetNumChannels(); n++) { if(m_PlayState.Chn[n].nLastNote != NOTE_NONE) { m_PlayState.Chn[n].nNewNote = m_PlayState.Chn[n].nLastNote; } if(memory.chnSettings[n].vol != 0xFF && !adjustSamplePos) { m_PlayState.Chn[n].nVolume = std::min(memory.chnSettings[n].vol, uint8(64)) * 4; } } #ifndef NO_PLUGINS // If there were any PC events, update plugin parameters to their latest value. std::bitset<MAX_MIXPLUGINS> plugSetProgram; for(const auto &param : memory.plugParams) { PLUGINDEX plug = param.first.first - 1; IMixPlugin *plugin = m_MixPlugins[plug].pMixPlugin; if(plugin != nullptr) { if(!plugSetProgram[plug]) { // Used for bridged plugins to avoid sending out individual messages for each parameter. plugSetProgram.set(plug); plugin->BeginSetProgram(); } plugin->SetParameter(param.first.second, param.second / PlugParamValue(ModCommand::maxColumnValue)); } } if(plugSetProgram.any()) { for(PLUGINDEX i = 0; i < MAX_MIXPLUGINS; i++) { if(plugSetProgram[i]) { m_MixPlugins[i].pMixPlugin->EndSetProgram(); } } } #endif // NO_PLUGINS } else if(adjustMode != eAdjustOnSuccess) { // Target not found (e.g. when jumping to a hidden sub song), reset global variables... m_PlayState.m_nMusicSpeed = m_nDefaultSpeed; m_PlayState.m_nMusicTempo = m_nDefaultTempo; m_PlayState.m_nGlobalVolume = m_nDefaultGlobalVolume; } // When adjusting the playback status, we will also want to update the visited rows vector according to the current position. if(sequence != Order.GetCurrentSequenceIndex()) { Order.SetSequence(sequence); } visitedSongRows.Set(visitedRows); } return results; }
{ "deleted": [ { "line_no": 978, "char_start": 31995, "char_end": 32059, "line": "\t\t\t\tfor(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++)\n" } ], "added": [ { "line_no": 978, "char_start": 31995, "char_end": 32021, "line": "\t\t\t\tpChn = playState.Chn;\n" }, { "line_no": 979, "char_start": 32021, "char_end": 32093, "line": "\t\t\t\tfor(CHANNELINDEX nChn = 0; nChn < GetNumChannels(); nChn++, pChn++)\n" } ] }
{ "deleted": [], "added": [ { "char_start": 31999, "char_end": 32025, "chars": "pChn = playState.Chn;\n\t\t\t\t" }, { "char_start": 32078, "char_end": 32086, "chars": "Chn++, p" } ] }
github.com/OpenMPT/openmpt/commit/492022c7297ede682161d9c0ec2de15526424e76
soundlib/Snd_fx.cpp
cwe-125
GetPSDRowSize
static inline size_t GetPSDRowSize(Image *image)
{
  if (image->depth == 1)
    return((image->columns+7)/8);
  else
    return(image->columns*GetPSDPacketSize(image));
}
static inline size_t GetPSDRowSize(Image *image)
{
  if (image->depth == 1)
    return(((image->columns+7)/8)*GetPSDPacketSize(image));
  else
    return(image->columns*GetPSDPacketSize(image));
}
{ "deleted": [ { "line_no": 4, "char_start": 76, "char_end": 110, "line": " return((image->columns+7)/8);\n" } ], "added": [ { "line_no": 4, "char_start": 76, "char_end": 136, "line": " return(((image->columns+7)/8)*GetPSDPacketSize(image));\n" } ] }
{ "deleted": [], "added": [ { "char_start": 88, "char_end": 89, "chars": "(" }, { "char_start": 108, "char_end": 133, "chars": ")*GetPSDPacketSize(image)" } ] }
github.com/ImageMagick/ImageMagick/commit/5f16640725b1225e6337c62526e6577f0f88edb8
coders/psd.c
cwe-125
dex_loadcode
static int dex_loadcode(RBinFile *arch, RBinDexObj *bin) { struct r_bin_t *rbin = arch->rbin; int i; int *methods = NULL; int sym_count = 0; // doublecheck?? if (!bin || bin->methods_list) { return false; } bin->code_from = UT64_MAX; bin->code_to = 0; bin->methods_list = r_list_newf ((RListFree)free); if (!bin->methods_list) { return false; } bin->imports_list = r_list_newf ((RListFree)free); if (!bin->imports_list) { r_list_free (bin->methods_list); return false; } bin->classes_list = r_list_newf ((RListFree)__r_bin_class_free); if (!bin->classes_list) { r_list_free (bin->methods_list); r_list_free (bin->imports_list); return false; } if (bin->header.method_size>bin->size) { bin->header.method_size = 0; return false; } /* WrapDown the header sizes to avoid huge allocations */ bin->header.method_size = R_MIN (bin->header.method_size, bin->size); bin->header.class_size = R_MIN (bin->header.class_size, bin->size); bin->header.strings_size = R_MIN (bin->header.strings_size, bin->size); // TODO: is this posible after R_MIN ?? if (bin->header.strings_size > bin->size) { eprintf ("Invalid strings size\n"); return false; } if (bin->classes) { ut64 amount = sizeof (int) * bin->header.method_size; if (amount > UT32_MAX || amount < bin->header.method_size) { return false; } methods = calloc (1, amount + 1); for (i = 0; i < bin->header.class_size; i++) { char *super_name, *class_name; struct dex_class_t *c = &bin->classes[i]; class_name = dex_class_name (bin, c); super_name = dex_class_super_name (bin, c); if (dexdump) { rbin->cb_printf ("Class #%d -\n", i); } parse_class (arch, bin, c, i, methods, &sym_count); free (class_name); free (super_name); } } if (methods) { int import_count = 0; int sym_count = bin->methods_list->length; for (i = 0; i < bin->header.method_size; i++) { int len = 0; if (methods[i]) { continue; } if (bin->methods[i].class_id > bin->header.types_size - 1) { continue; } if (is_class_idx_in_code_classes(bin, bin->methods[i].class_id)) { continue; } char *class_name = getstr ( bin, bin->types[bin->methods[i].class_id] .descriptor_id); if (!class_name) { free (class_name); continue; } len = strlen (class_name); if (len < 1) { continue; } class_name[len - 1] = 0; // remove last char ";" char *method_name = dex_method_name (bin, i); char *signature = dex_method_signature (bin, i); if (method_name && *method_name) { RBinImport *imp = R_NEW0 (RBinImport); imp->name = r_str_newf ("%s.method.%s%s", class_name, method_name, signature); imp->type = r_str_const ("FUNC"); imp->bind = r_str_const ("NONE"); imp->ordinal = import_count++; r_list_append (bin->imports_list, imp); RBinSymbol *sym = R_NEW0 (RBinSymbol); sym->name = r_str_newf ("imp.%s", imp->name); sym->type = r_str_const ("FUNC"); sym->bind = r_str_const ("NONE"); //XXX so damn unsafe check buffer boundaries!!!! //XXX use r_buf API!! sym->paddr = sym->vaddr = bin->b->base + bin->header.method_offset + (sizeof (struct dex_method_t) * i) ; sym->ordinal = sym_count++; r_list_append (bin->methods_list, sym); sdb_num_set (mdb, sdb_fmt (0, "method.%d", i), sym->paddr, 0); } free (method_name); free (signature); free (class_name); } free (methods); } return true; }
static int dex_loadcode(RBinFile *arch, RBinDexObj *bin) { struct r_bin_t *rbin = arch->rbin; int i; int *methods = NULL; int sym_count = 0; // doublecheck?? if (!bin || bin->methods_list) { return false; } bin->code_from = UT64_MAX; bin->code_to = 0; bin->methods_list = r_list_newf ((RListFree)free); if (!bin->methods_list) { return false; } bin->imports_list = r_list_newf ((RListFree)free); if (!bin->imports_list) { r_list_free (bin->methods_list); return false; } bin->classes_list = r_list_newf ((RListFree)__r_bin_class_free); if (!bin->classes_list) { r_list_free (bin->methods_list); r_list_free (bin->imports_list); return false; } if (bin->header.method_size>bin->size) { bin->header.method_size = 0; return false; } /* WrapDown the header sizes to avoid huge allocations */ bin->header.method_size = R_MIN (bin->header.method_size, bin->size); bin->header.class_size = R_MIN (bin->header.class_size, bin->size); bin->header.strings_size = R_MIN (bin->header.strings_size, bin->size); // TODO: is this posible after R_MIN ?? if (bin->header.strings_size > bin->size) { eprintf ("Invalid strings size\n"); return false; } if (bin->classes) { ut64 amount = sizeof (int) * bin->header.method_size; if (amount > UT32_MAX || amount < bin->header.method_size) { return false; } methods = calloc (1, amount + 1); for (i = 0; i < bin->header.class_size; i++) { char *super_name, *class_name; struct dex_class_t *c = &bin->classes[i]; class_name = dex_class_name (bin, c); super_name = dex_class_super_name (bin, c); if (dexdump) { rbin->cb_printf ("Class #%d -\n", i); } parse_class (arch, bin, c, i, methods, &sym_count); free (class_name); free (super_name); } } if (methods) { int import_count = 0; int sym_count = bin->methods_list->length; for (i = 0; i < bin->header.method_size; i++) { int len = 0; if (methods[i]) { continue; } if (bin->methods[i].class_id > bin->header.types_size) { continue; } if (is_class_idx_in_code_classes(bin, bin->methods[i].class_id)) { continue; } char *class_name = getstr ( bin, bin->types[bin->methods[i].class_id] .descriptor_id); if (!class_name) { free (class_name); continue; } len = strlen (class_name); if (len < 1) { continue; } class_name[len - 1] = 0; // remove last char ";" char *method_name = dex_method_name (bin, i); char *signature = dex_method_signature (bin, i); if (method_name && *method_name) { RBinImport *imp = R_NEW0 (RBinImport); imp->name = r_str_newf ("%s.method.%s%s", class_name, method_name, signature); imp->type = r_str_const ("FUNC"); imp->bind = r_str_const ("NONE"); imp->ordinal = import_count++; r_list_append (bin->imports_list, imp); RBinSymbol *sym = R_NEW0 (RBinSymbol); sym->name = r_str_newf ("imp.%s", imp->name); sym->type = r_str_const ("FUNC"); sym->bind = r_str_const ("NONE"); //XXX so damn unsafe check buffer boundaries!!!! //XXX use r_buf API!! sym->paddr = sym->vaddr = bin->b->base + bin->header.method_offset + (sizeof (struct dex_method_t) * i) ; sym->ordinal = sym_count++; r_list_append (bin->methods_list, sym); sdb_num_set (mdb, sdb_fmt (0, "method.%d", i), sym->paddr, 0); } free (method_name); free (signature); free (class_name); } free (methods); } return true; }
{ "deleted": [ { "line_no": 75, "char_start": 1978, "char_end": 2042, "line": "\t\t\tif (bin->methods[i].class_id > bin->header.types_size - 1) {\n" } ], "added": [ { "line_no": 75, "char_start": 1978, "char_end": 2038, "line": "\t\t\tif (bin->methods[i].class_id > bin->header.types_size) {\n" } ] }
{ "deleted": [ { "char_start": 2034, "char_end": 2038, "chars": " - 1" } ], "added": [] }
github.com/radare/radare2/commit/ead645853a63bf83d8386702cad0cf23b31d7eeb
libr/bin/p/bin_dex.c
cwe-125
ReadSUNImage
static Image *ReadSUNImage(const ImageInfo *image_info,ExceptionInfo *exception) { #define RMT_EQUAL_RGB 1 #define RMT_NONE 0 #define RMT_RAW 2 #define RT_STANDARD 1 #define RT_ENCODED 2 #define RT_FORMAT_RGB 3 typedef struct _SUNInfo { unsigned int magic, width, height, depth, length, type, maptype, maplength; } SUNInfo; Image *image; int bit; MagickBooleanType status; MagickSizeType number_pixels; register Quantum *q; register ssize_t i, x; register unsigned char *p; size_t bytes_per_line, extent, height, length; ssize_t count, y; SUNInfo sun_info; unsigned char *sun_data, *sun_pixels; /* Open image file. */ assert(image_info != (const ImageInfo *) NULL); assert(image_info->signature == MagickSignature); if (image_info->debug != MagickFalse) (void) LogMagickEvent(TraceEvent,GetMagickModule(),"%s", image_info->filename); assert(exception != (ExceptionInfo *) NULL); assert(exception->signature == MagickSignature); image=AcquireImage(image_info,exception); status=OpenBlob(image_info,image,ReadBinaryBlobMode,exception); if (status == MagickFalse) { image=DestroyImageList(image); return((Image *) NULL); } /* Read SUN raster header. */ (void) ResetMagickMemory(&sun_info,0,sizeof(sun_info)); sun_info.magic=ReadBlobMSBLong(image); do { /* Verify SUN identifier. */ if (sun_info.magic != 0x59a66a95) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); sun_info.width=ReadBlobMSBLong(image); sun_info.height=ReadBlobMSBLong(image); sun_info.depth=ReadBlobMSBLong(image); sun_info.length=ReadBlobMSBLong(image); sun_info.type=ReadBlobMSBLong(image); sun_info.maptype=ReadBlobMSBLong(image); sun_info.maplength=ReadBlobMSBLong(image); extent=sun_info.height*sun_info.width; if ((sun_info.height != 0) && (sun_info.width != extent/sun_info.height)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.type != RT_STANDARD) && (sun_info.type != RT_ENCODED) && (sun_info.type != RT_FORMAT_RGB)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.maptype == RMT_NONE) && (sun_info.maplength != 0)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.depth == 0) || (sun_info.depth > 32)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.maptype != RMT_NONE) && (sun_info.maptype != RMT_EQUAL_RGB) && (sun_info.maptype != RMT_RAW)) ThrowReaderException(CoderError,"ColormapTypeNotSupported"); image->columns=sun_info.width; image->rows=sun_info.height; image->depth=sun_info.depth <= 8 ? sun_info.depth : MAGICKCORE_QUANTUM_DEPTH; if (sun_info.depth < 24) { size_t one; image->colors=sun_info.maplength; one=1; if (sun_info.maptype == RMT_NONE) image->colors=one << sun_info.depth; if (sun_info.maptype == RMT_EQUAL_RGB) image->colors=sun_info.maplength/3; if (AcquireImageColormap(image,image->colors,exception) == MagickFalse) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); } switch (sun_info.maptype) { case RMT_NONE: break; case RMT_EQUAL_RGB: { unsigned char *sun_colormap; /* Read SUN raster colormap. 
*/ sun_colormap=(unsigned char *) AcquireQuantumMemory(image->colors, sizeof(*sun_colormap)); if (sun_colormap == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); count=ReadBlob(image,image->colors,sun_colormap); if (count != (ssize_t) image->colors) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); for (i=0; i < (ssize_t) image->colors; i++) image->colormap[i].red=(MagickRealType) ScaleCharToQuantum( sun_colormap[i]); count=ReadBlob(image,image->colors,sun_colormap); if (count != (ssize_t) image->colors) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); for (i=0; i < (ssize_t) image->colors; i++) image->colormap[i].green=(MagickRealType) ScaleCharToQuantum( sun_colormap[i]); count=ReadBlob(image,image->colors,sun_colormap); if (count != (ssize_t) image->colors) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); for (i=0; i < (ssize_t) image->colors; i++) image->colormap[i].blue=(MagickRealType) ScaleCharToQuantum( sun_colormap[i]); sun_colormap=(unsigned char *) RelinquishMagickMemory(sun_colormap); break; } case RMT_RAW: { unsigned char *sun_colormap; /* Read SUN raster colormap. */ sun_colormap=(unsigned char *) AcquireQuantumMemory(sun_info.maplength, sizeof(*sun_colormap)); if (sun_colormap == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); count=ReadBlob(image,sun_info.maplength,sun_colormap); if (count != (ssize_t) sun_info.maplength) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); sun_colormap=(unsigned char *) RelinquishMagickMemory(sun_colormap); break; } default: ThrowReaderException(CoderError,"ColormapTypeNotSupported"); } image->alpha_trait=sun_info.depth == 32 ? BlendPixelTrait : UndefinedPixelTrait; image->columns=sun_info.width; image->rows=sun_info.height; if (image_info->ping != MagickFalse) { (void) CloseBlob(image); return(GetFirstImageInList(image)); } status=SetImageExtent(image,image->columns,image->rows,exception); if (status == MagickFalse) return(DestroyImageList(image)); if ((sun_info.length*sizeof(*sun_data))/sizeof(*sun_data) != sun_info.length || !sun_info.length) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); number_pixels=(MagickSizeType) image->columns*image->rows; if ((sun_info.type != RT_ENCODED) && ((number_pixels*sun_info.depth) > (8*sun_info.length))) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); bytes_per_line=sun_info.width*sun_info.depth; sun_data=(unsigned char *) AcquireQuantumMemory((size_t) MagickMax( sun_info.length,bytes_per_line*sun_info.width),sizeof(*sun_data)); if (sun_data == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); count=(ssize_t) ReadBlob(image,sun_info.length,sun_data); if (count != (ssize_t) sun_info.length) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); height=sun_info.height; if ((height == 0) || (sun_info.width == 0) || (sun_info.depth == 0) || ((bytes_per_line/sun_info.depth) != sun_info.width)) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); bytes_per_line+=15; bytes_per_line<<=1; if ((bytes_per_line >> 1) != (sun_info.width*sun_info.depth+15)) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); bytes_per_line>>=4; sun_pixels=(unsigned char *) AcquireQuantumMemory(height, bytes_per_line*sizeof(*sun_pixels)); if (sun_pixels == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); if (sun_info.type == RT_ENCODED) (void) 
DecodeImage(sun_data,sun_info.length,sun_pixels,bytes_per_line* height); sun_data=(unsigned char *) RelinquishMagickMemory(sun_data); /* Convert SUN raster image to pixel packets. */ p=sun_pixels; if (sun_info.depth == 1) for (y=0; y < (ssize_t) image->rows; y++) { q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (Quantum *) NULL) break; for (x=0; x < ((ssize_t) image->columns-7); x+=8) { for (bit=7; bit >= 0; bit--) { SetPixelIndex(image,(Quantum) ((*p) & (0x01 << bit) ? 0x00 : 0x01), q); q+=GetPixelChannels(image); } p++; } if ((image->columns % 8) != 0) { for (bit=7; bit >= (int) (8-(image->columns % 8)); bit--) { SetPixelIndex(image,(Quantum) ((*p) & (0x01 << bit) ? 0x00 : 0x01),q); q+=GetPixelChannels(image); } p++; } if ((((image->columns/8)+(image->columns % 8 ? 1 : 0)) % 2) != 0) p++; if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) y, image->rows); if (status == MagickFalse) break; } } else if (image->storage_class == PseudoClass) { if (bytes_per_line == 0) bytes_per_line=image->columns; length=image->rows*(image->columns+image->columns % 2); if (((sun_info.type == RT_ENCODED) && (length > (bytes_per_line*image->rows))) || ((sun_info.type != RT_ENCODED) && (length > sun_info.length))) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); for (y=0; y < (ssize_t) image->rows; y++) { q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (Quantum *) NULL) break; for (x=0; x < (ssize_t) image->columns; x++) { SetPixelIndex(image,*p++,q); q+=GetPixelChannels(image); } if ((image->columns % 2) != 0) p++; if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) y, image->rows); if (status == MagickFalse) break; } } } else { size_t bytes_per_pixel; bytes_per_pixel=3; if (image->alpha_trait != UndefinedPixelTrait) bytes_per_pixel++; if (bytes_per_line == 0) bytes_per_line=bytes_per_pixel*image->columns; length=image->rows*(bytes_per_line+bytes_per_line % 2); if (((sun_info.type == RT_ENCODED) && (length > (bytes_per_line*image->rows))) || ((sun_info.type != RT_ENCODED) && (length > sun_info.length))) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); for (y=0; y < (ssize_t) image->rows; y++) { q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (Quantum *) NULL) break; for (x=0; x < (ssize_t) image->columns; x++) { if (image->alpha_trait != UndefinedPixelTrait) SetPixelAlpha(image,ScaleCharToQuantum(*p++),q); if (sun_info.type == RT_STANDARD) { SetPixelBlue(image,ScaleCharToQuantum(*p++),q); SetPixelGreen(image,ScaleCharToQuantum(*p++),q); SetPixelRed(image,ScaleCharToQuantum(*p++),q); } else { SetPixelRed(image,ScaleCharToQuantum(*p++),q); SetPixelGreen(image,ScaleCharToQuantum(*p++),q); SetPixelBlue(image,ScaleCharToQuantum(*p++),q); } if (image->colors != 0) { SetPixelRed(image,ClampToQuantum(image->colormap[(ssize_t) GetPixelRed(image,q)].red),q); SetPixelGreen(image,ClampToQuantum(image->colormap[(ssize_t) GetPixelGreen(image,q)].green),q); SetPixelBlue(image,ClampToQuantum(image->colormap[(ssize_t) GetPixelBlue(image,q)].blue),q); } q+=GetPixelChannels(image); } if (((bytes_per_pixel*image->columns) % 2) != 0) p++; if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) 
y, image->rows); if (status == MagickFalse) break; } } } if (image->storage_class == PseudoClass) (void) SyncImage(image,exception); sun_pixels=(unsigned char *) RelinquishMagickMemory(sun_pixels); if (EOFBlob(image) != MagickFalse) { ThrowFileException(exception,CorruptImageError,"UnexpectedEndOfFile", image->filename); break; } /* Proceed to next image. */ if (image_info->number_scenes != 0) if (image->scene >= (image_info->scene+image_info->number_scenes-1)) break; sun_info.magic=ReadBlobMSBLong(image); if (sun_info.magic == 0x59a66a95) { /* Allocate next image structure. */ AcquireNextImage(image_info,image,exception); if (GetNextImageInList(image) == (Image *) NULL) { image=DestroyImageList(image); return((Image *) NULL); } image=SyncNextImageInList(image); status=SetImageProgress(image,LoadImagesTag,TellBlob(image), GetBlobSize(image)); if (status == MagickFalse) break; } } while (sun_info.magic == 0x59a66a95); (void) CloseBlob(image); return(GetFirstImageInList(image)); }
static Image *ReadSUNImage(const ImageInfo *image_info,ExceptionInfo *exception) { #define RMT_EQUAL_RGB 1 #define RMT_NONE 0 #define RMT_RAW 2 #define RT_STANDARD 1 #define RT_ENCODED 2 #define RT_FORMAT_RGB 3 typedef struct _SUNInfo { unsigned int magic, width, height, depth, length, type, maptype, maplength; } SUNInfo; Image *image; int bit; MagickBooleanType status; MagickSizeType number_pixels; register Quantum *q; register ssize_t i, x; register unsigned char *p; size_t bytes_per_line, extent, height, length; ssize_t count, y; SUNInfo sun_info; unsigned char *sun_data, *sun_pixels; /* Open image file. */ assert(image_info != (const ImageInfo *) NULL); assert(image_info->signature == MagickSignature); if (image_info->debug != MagickFalse) (void) LogMagickEvent(TraceEvent,GetMagickModule(),"%s", image_info->filename); assert(exception != (ExceptionInfo *) NULL); assert(exception->signature == MagickSignature); image=AcquireImage(image_info,exception); status=OpenBlob(image_info,image,ReadBinaryBlobMode,exception); if (status == MagickFalse) { image=DestroyImageList(image); return((Image *) NULL); } /* Read SUN raster header. */ (void) ResetMagickMemory(&sun_info,0,sizeof(sun_info)); sun_info.magic=ReadBlobMSBLong(image); do { /* Verify SUN identifier. */ if (sun_info.magic != 0x59a66a95) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); sun_info.width=ReadBlobMSBLong(image); sun_info.height=ReadBlobMSBLong(image); sun_info.depth=ReadBlobMSBLong(image); sun_info.length=ReadBlobMSBLong(image); sun_info.type=ReadBlobMSBLong(image); sun_info.maptype=ReadBlobMSBLong(image); sun_info.maplength=ReadBlobMSBLong(image); extent=sun_info.height*sun_info.width; if ((sun_info.height != 0) && (sun_info.width != extent/sun_info.height)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.type != RT_STANDARD) && (sun_info.type != RT_ENCODED) && (sun_info.type != RT_FORMAT_RGB)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.maptype == RMT_NONE) && (sun_info.maplength != 0)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.depth == 0) || (sun_info.depth > 32)) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); if ((sun_info.maptype != RMT_NONE) && (sun_info.maptype != RMT_EQUAL_RGB) && (sun_info.maptype != RMT_RAW)) ThrowReaderException(CoderError,"ColormapTypeNotSupported"); image->columns=sun_info.width; image->rows=sun_info.height; image->depth=sun_info.depth <= 8 ? sun_info.depth : MAGICKCORE_QUANTUM_DEPTH; if (sun_info.depth < 24) { size_t one; image->colors=sun_info.maplength; one=1; if (sun_info.maptype == RMT_NONE) image->colors=one << sun_info.depth; if (sun_info.maptype == RMT_EQUAL_RGB) image->colors=sun_info.maplength/3; if (AcquireImageColormap(image,image->colors,exception) == MagickFalse) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); } switch (sun_info.maptype) { case RMT_NONE: break; case RMT_EQUAL_RGB: { unsigned char *sun_colormap; /* Read SUN raster colormap. 
*/ sun_colormap=(unsigned char *) AcquireQuantumMemory(image->colors, sizeof(*sun_colormap)); if (sun_colormap == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); count=ReadBlob(image,image->colors,sun_colormap); if (count != (ssize_t) image->colors) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); for (i=0; i < (ssize_t) image->colors; i++) image->colormap[i].red=(MagickRealType) ScaleCharToQuantum( sun_colormap[i]); count=ReadBlob(image,image->colors,sun_colormap); if (count != (ssize_t) image->colors) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); for (i=0; i < (ssize_t) image->colors; i++) image->colormap[i].green=(MagickRealType) ScaleCharToQuantum( sun_colormap[i]); count=ReadBlob(image,image->colors,sun_colormap); if (count != (ssize_t) image->colors) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); for (i=0; i < (ssize_t) image->colors; i++) image->colormap[i].blue=(MagickRealType) ScaleCharToQuantum( sun_colormap[i]); sun_colormap=(unsigned char *) RelinquishMagickMemory(sun_colormap); break; } case RMT_RAW: { unsigned char *sun_colormap; /* Read SUN raster colormap. */ sun_colormap=(unsigned char *) AcquireQuantumMemory(sun_info.maplength, sizeof(*sun_colormap)); if (sun_colormap == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); count=ReadBlob(image,sun_info.maplength,sun_colormap); if (count != (ssize_t) sun_info.maplength) ThrowReaderException(CorruptImageError,"UnexpectedEndOfFile"); sun_colormap=(unsigned char *) RelinquishMagickMemory(sun_colormap); break; } default: ThrowReaderException(CoderError,"ColormapTypeNotSupported"); } image->alpha_trait=sun_info.depth == 32 ? BlendPixelTrait : UndefinedPixelTrait; image->columns=sun_info.width; image->rows=sun_info.height; if (image_info->ping != MagickFalse) { (void) CloseBlob(image); return(GetFirstImageInList(image)); } status=SetImageExtent(image,image->columns,image->rows,exception); if (status == MagickFalse) return(DestroyImageList(image)); if ((sun_info.length*sizeof(*sun_data))/sizeof(*sun_data) != sun_info.length || !sun_info.length) ThrowReaderException(ResourceLimitError,"ImproperImageHeader"); number_pixels=(MagickSizeType) image->columns*image->rows; if ((sun_info.type != RT_ENCODED) && ((number_pixels*sun_info.depth) > (8*sun_info.length))) ThrowReaderException(CorruptImageError,"ImproperImageHeader"); bytes_per_line=sun_info.width*sun_info.depth; sun_data=(unsigned char *) AcquireQuantumMemory((size_t) MagickMax( sun_info.length,bytes_per_line*sun_info.width),sizeof(*sun_data)); if (sun_data == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); count=(ssize_t) ReadBlob(image,sun_info.length,sun_data); if (count != (ssize_t) sun_info.length) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); height=sun_info.height; if ((height == 0) || (sun_info.width == 0) || (sun_info.depth == 0) || ((bytes_per_line/sun_info.depth) != sun_info.width)) ThrowReaderException(ResourceLimitError,"ImproperImageHeader"); bytes_per_line+=15; bytes_per_line<<=1; if ((bytes_per_line >> 1) != (sun_info.width*sun_info.depth+15)) ThrowReaderException(ResourceLimitError,"ImproperImageHeader"); bytes_per_line>>=4; sun_pixels=(unsigned char *) AcquireQuantumMemory(height, bytes_per_line*sizeof(*sun_pixels)); if (sun_pixels == (unsigned char *) NULL) ThrowReaderException(ResourceLimitError,"MemoryAllocationFailed"); if (sun_info.type == RT_ENCODED) (void) 
DecodeImage(sun_data,sun_info.length,sun_pixels,bytes_per_line* height); else { if (sun_info.length > (height*bytes_per_line)) ThrowReaderException(ResourceLimitError,"ImproperImageHeader"); (void) CopyMagickMemory(sun_pixels,sun_data,sun_info.length); } sun_data=(unsigned char *) RelinquishMagickMemory(sun_data); /* Convert SUN raster image to pixel packets. */ p=sun_pixels; if (sun_info.depth == 1) for (y=0; y < (ssize_t) image->rows; y++) { q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (Quantum *) NULL) break; for (x=0; x < ((ssize_t) image->columns-7); x+=8) { for (bit=7; bit >= 0; bit--) { SetPixelIndex(image,(Quantum) ((*p) & (0x01 << bit) ? 0x00 : 0x01), q); q+=GetPixelChannels(image); } p++; } if ((image->columns % 8) != 0) { for (bit=7; bit >= (int) (8-(image->columns % 8)); bit--) { SetPixelIndex(image,(Quantum) ((*p) & (0x01 << bit) ? 0x00 : 0x01),q); q+=GetPixelChannels(image); } p++; } if ((((image->columns/8)+(image->columns % 8 ? 1 : 0)) % 2) != 0) p++; if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) y, image->rows); if (status == MagickFalse) break; } } else if (image->storage_class == PseudoClass) { if (bytes_per_line == 0) bytes_per_line=image->columns; length=image->rows*(image->columns+image->columns % 2); if (((sun_info.type == RT_ENCODED) && (length > (bytes_per_line*image->rows))) || ((sun_info.type != RT_ENCODED) && (length > sun_info.length))) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); for (y=0; y < (ssize_t) image->rows; y++) { q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (Quantum *) NULL) break; for (x=0; x < (ssize_t) image->columns; x++) { SetPixelIndex(image,*p++,q); q+=GetPixelChannels(image); } if ((image->columns % 2) != 0) p++; if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) y, image->rows); if (status == MagickFalse) break; } } } else { size_t bytes_per_pixel; bytes_per_pixel=3; if (image->alpha_trait != UndefinedPixelTrait) bytes_per_pixel++; if (bytes_per_line == 0) bytes_per_line=bytes_per_pixel*image->columns; length=image->rows*(bytes_per_line+bytes_per_line % 2); if (((sun_info.type == RT_ENCODED) && (length > (bytes_per_line*image->rows))) || ((sun_info.type != RT_ENCODED) && (length > sun_info.length))) ThrowReaderException(CorruptImageError,"UnableToReadImageData"); for (y=0; y < (ssize_t) image->rows; y++) { q=QueueAuthenticPixels(image,0,y,image->columns,1,exception); if (q == (Quantum *) NULL) break; for (x=0; x < (ssize_t) image->columns; x++) { if (image->alpha_trait != UndefinedPixelTrait) SetPixelAlpha(image,ScaleCharToQuantum(*p++),q); if (sun_info.type == RT_STANDARD) { SetPixelBlue(image,ScaleCharToQuantum(*p++),q); SetPixelGreen(image,ScaleCharToQuantum(*p++),q); SetPixelRed(image,ScaleCharToQuantum(*p++),q); } else { SetPixelRed(image,ScaleCharToQuantum(*p++),q); SetPixelGreen(image,ScaleCharToQuantum(*p++),q); SetPixelBlue(image,ScaleCharToQuantum(*p++),q); } if (image->colors != 0) { SetPixelRed(image,ClampToQuantum(image->colormap[(ssize_t) GetPixelRed(image,q)].red),q); SetPixelGreen(image,ClampToQuantum(image->colormap[(ssize_t) GetPixelGreen(image,q)].green),q); SetPixelBlue(image,ClampToQuantum(image->colormap[(ssize_t) GetPixelBlue(image,q)].blue),q); } q+=GetPixelChannels(image); } if (((bytes_per_pixel*image->columns) % 
2) != 0) p++; if (SyncAuthenticPixels(image,exception) == MagickFalse) break; if (image->previous == (Image *) NULL) { status=SetImageProgress(image,LoadImageTag,(MagickOffsetType) y, image->rows); if (status == MagickFalse) break; } } } if (image->storage_class == PseudoClass) (void) SyncImage(image,exception); sun_pixels=(unsigned char *) RelinquishMagickMemory(sun_pixels); if (EOFBlob(image) != MagickFalse) { ThrowFileException(exception,CorruptImageError,"UnexpectedEndOfFile", image->filename); break; } /* Proceed to next image. */ if (image_info->number_scenes != 0) if (image->scene >= (image_info->scene+image_info->number_scenes-1)) break; sun_info.magic=ReadBlobMSBLong(image); if (sun_info.magic == 0x59a66a95) { /* Allocate next image structure. */ AcquireNextImage(image_info,image,exception); if (GetNextImageInList(image) == (Image *) NULL) { image=DestroyImageList(image); return((Image *) NULL); } image=SyncNextImageInList(image); status=SetImageProgress(image,LoadImagesTag,TellBlob(image), GetBlobSize(image)); if (status == MagickFalse) break; } } while (sun_info.magic == 0x59a66a95); (void) CloseBlob(image); return(GetFirstImageInList(image)); }
{ "deleted": [ { "line_no": 201, "char_start": 6237, "char_end": 6310, "line": " ThrowReaderException(ResourceLimitError,\"MemoryAllocationFailed\");\n" }, { "line_no": 217, "char_start": 7201, "char_end": 7274, "line": " ThrowReaderException(ResourceLimitError,\"MemoryAllocationFailed\");\n" }, { "line_no": 221, "char_start": 7391, "char_end": 7464, "line": " ThrowReaderException(ResourceLimitError,\"MemoryAllocationFailed\");\n" } ], "added": [ { "line_no": 201, "char_start": 6237, "char_end": 6307, "line": " ThrowReaderException(ResourceLimitError,\"ImproperImageHeader\");\n" }, { "line_no": 217, "char_start": 7198, "char_end": 7268, "line": " ThrowReaderException(ResourceLimitError,\"ImproperImageHeader\");\n" }, { "line_no": 221, "char_start": 7385, "char_end": 7455, "line": " ThrowReaderException(ResourceLimitError,\"ImproperImageHeader\");\n" }, { "line_no": 230, "char_start": 7834, "char_end": 7843, "line": " else\n" }, { "line_no": 231, "char_start": 7843, "char_end": 7851, "line": " {\n" }, { "line_no": 232, "char_start": 7851, "char_end": 7906, "line": " if (sun_info.length > (height*bytes_per_line))\n" }, { "line_no": 233, "char_start": 7906, "char_end": 7980, "line": " ThrowReaderException(ResourceLimitError,\"ImproperImageHeader\");\n" }, { "line_no": 234, "char_start": 7980, "char_end": 8050, "line": " (void) CopyMagickMemory(sun_pixels,sun_data,sun_info.length);\n" }, { "line_no": 235, "char_start": 8050, "char_end": 8058, "line": " }\n" } ] }
{ "deleted": [ { "char_start": 6284, "char_end": 6286, "chars": "Me" }, { "char_start": 6289, "char_end": 6295, "chars": "yAlloc" }, { "char_start": 6296, "char_end": 6304, "chars": "tionFail" }, { "char_start": 7248, "char_end": 7250, "chars": "Me" }, { "char_start": 7253, "char_end": 7259, "chars": "yAlloc" }, { "char_start": 7260, "char_end": 7268, "chars": "tionFail" }, { "char_start": 7438, "char_end": 7440, "chars": "Me" }, { "char_start": 7443, "char_end": 7449, "chars": "yAlloc" }, { "char_start": 7450, "char_end": 7458, "chars": "tionFail" }, { "char_start": 7826, "char_end": 7826, "chars": "" } ], "added": [ { "char_start": 6284, "char_end": 6285, "chars": "I" }, { "char_start": 6286, "char_end": 6287, "chars": "p" }, { "char_start": 6289, "char_end": 6294, "chars": "perIm" }, { "char_start": 6295, "char_end": 6298, "chars": "geH" }, { "char_start": 6299, "char_end": 6300, "chars": "a" }, { "char_start": 6301, "char_end": 6303, "chars": "er" }, { "char_start": 7245, "char_end": 7246, "chars": "I" }, { "char_start": 7247, "char_end": 7248, "chars": "p" }, { "char_start": 7250, "char_end": 7255, "chars": "perIm" }, { "char_start": 7256, "char_end": 7259, "chars": "geH" }, { "char_start": 7260, "char_end": 7261, "chars": "a" }, { "char_start": 7262, "char_end": 7264, "chars": "er" }, { "char_start": 7432, "char_end": 7433, "chars": "I" }, { "char_start": 7434, "char_end": 7435, "chars": "p" }, { "char_start": 7437, "char_end": 7442, "chars": "perIm" }, { "char_start": 7443, "char_end": 7446, "chars": "geH" }, { "char_start": 7447, "char_end": 7448, "chars": "a" }, { "char_start": 7449, "char_end": 7451, "chars": "er" }, { "char_start": 7833, "char_end": 8057, "chars": "\n else\n {\n if (sun_info.length > (height*bytes_per_line))\n ThrowReaderException(ResourceLimitError,\"ImproperImageHeader\");\n (void) CopyMagickMemory(sun_pixels,sun_data,sun_info.length);\n }" } ] }
github.com/ImageMagick/ImageMagick/commit/6b4aff0f117b978502ee5bcd6e753c17aec5a961
coders/sun.c
cwe-125
php_wddx_push_element
*/ static void php_wddx_push_element(void *user_data, const XML_Char *name, const XML_Char **atts) { st_entry ent; wddx_stack *stack = (wddx_stack *)user_data; if (!strcmp(name, EL_PACKET)) { int i; if (atts) for (i=0; atts[i]; i++) { if (!strcmp(atts[i], EL_VERSION)) { /* nothing for now */ } } } else if (!strcmp(name, EL_STRING)) { ent.type = ST_STRING; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_STRING; Z_STRVAL_P(ent.data) = STR_EMPTY_ALLOC(); Z_STRLEN_P(ent.data) = 0; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_BINARY)) { ent.type = ST_BINARY; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_STRING; Z_STRVAL_P(ent.data) = STR_EMPTY_ALLOC(); Z_STRLEN_P(ent.data) = 0; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_CHAR)) { int i; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_CHAR_CODE) && atts[i+1] && atts[i+1][0]) { char tmp_buf[2]; snprintf(tmp_buf, sizeof(tmp_buf), "%c", (char)strtol(atts[i+1], NULL, 16)); php_wddx_process_data(user_data, tmp_buf, strlen(tmp_buf)); break; } } } else if (!strcmp(name, EL_NUMBER)) { ent.type = ST_NUMBER; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_LONG; Z_LVAL_P(ent.data) = 0; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_BOOLEAN)) { int i; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_VALUE) && atts[i+1] && atts[i+1][0]) { ent.type = ST_BOOLEAN; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_BOOL; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); php_wddx_process_data(user_data, atts[i+1], strlen(atts[i+1])); break; } } } else if (!strcmp(name, EL_NULL)) { ent.type = ST_NULL; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); ZVAL_NULL(ent.data); wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_ARRAY)) { ent.type = ST_ARRAY; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); array_init(ent.data); INIT_PZVAL(ent.data); wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_STRUCT)) { ent.type = ST_STRUCT; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); array_init(ent.data); INIT_PZVAL(ent.data); wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_VAR)) { int i; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_NAME) && atts[i+1] && atts[i+1][0]) { if (stack->varname) efree(stack->varname); stack->varname = estrdup(atts[i+1]); break; } } } else if (!strcmp(name, EL_RECORDSET)) { int i; ent.type = ST_RECORDSET; SET_STACK_VARNAME; MAKE_STD_ZVAL(ent.data); array_init(ent.data); if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], "fieldNames") && atts[i+1] && atts[i+1][0]) { zval *tmp; char *key; char *p1, *p2, *endp; i++; endp = (char *)atts[i] + strlen(atts[i]); p1 = (char *)atts[i]; while ((p2 = php_memnstr(p1, ",", sizeof(",")-1, endp)) != NULL) { key = estrndup(p1, p2 - p1); MAKE_STD_ZVAL(tmp); array_init(tmp); add_assoc_zval_ex(ent.data, key, p2 - p1 + 1, tmp); p1 = p2 + sizeof(",")-1; efree(key); } if (p1 <= endp) { MAKE_STD_ZVAL(tmp); array_init(tmp); add_assoc_zval_ex(ent.data, p1, endp - p1 + 1, tmp); } break; } } wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_FIELD)) { int i; st_entry ent; ent.type = ST_FIELD; ent.varname = NULL; ent.data = 
NULL; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_NAME) && atts[i+1] && atts[i+1][0]) { st_entry *recordset; zval **field; if (wddx_stack_top(stack, (void**)&recordset) == SUCCESS && recordset->type == ST_RECORDSET && zend_hash_find(Z_ARRVAL_P(recordset->data), (char*)atts[i+1], strlen(atts[i+1])+1, (void**)&field) == SUCCESS) { ent.data = *field; } break; } } wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_DATETIME)) { ent.type = ST_DATETIME; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_LONG; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); }
*/ static void php_wddx_push_element(void *user_data, const XML_Char *name, const XML_Char **atts) { st_entry ent; wddx_stack *stack = (wddx_stack *)user_data; if (!strcmp(name, EL_PACKET)) { int i; if (atts) for (i=0; atts[i]; i++) { if (!strcmp(atts[i], EL_VERSION)) { /* nothing for now */ } } } else if (!strcmp(name, EL_STRING)) { ent.type = ST_STRING; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_STRING; Z_STRVAL_P(ent.data) = STR_EMPTY_ALLOC(); Z_STRLEN_P(ent.data) = 0; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_BINARY)) { ent.type = ST_BINARY; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_STRING; Z_STRVAL_P(ent.data) = STR_EMPTY_ALLOC(); Z_STRLEN_P(ent.data) = 0; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_CHAR)) { int i; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_CHAR_CODE) && atts[i+1] && atts[i+1][0]) { char tmp_buf[2]; snprintf(tmp_buf, sizeof(tmp_buf), "%c", (char)strtol(atts[i+1], NULL, 16)); php_wddx_process_data(user_data, tmp_buf, strlen(tmp_buf)); break; } } } else if (!strcmp(name, EL_NUMBER)) { ent.type = ST_NUMBER; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_LONG; Z_LVAL_P(ent.data) = 0; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_BOOLEAN)) { int i; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_VALUE) && atts[i+1] && atts[i+1][0]) { ent.type = ST_BOOLEAN; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_BOOL; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); php_wddx_process_data(user_data, atts[i+1], strlen(atts[i+1])); break; } } else { ent.type = ST_BOOLEAN; SET_STACK_VARNAME; ZVAL_FALSE(&ent.data); wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } } else if (!strcmp(name, EL_NULL)) { ent.type = ST_NULL; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); ZVAL_NULL(ent.data); wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_ARRAY)) { ent.type = ST_ARRAY; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); array_init(ent.data); INIT_PZVAL(ent.data); wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_STRUCT)) { ent.type = ST_STRUCT; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); array_init(ent.data); INIT_PZVAL(ent.data); wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_VAR)) { int i; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_NAME) && atts[i+1] && atts[i+1][0]) { if (stack->varname) efree(stack->varname); stack->varname = estrdup(atts[i+1]); break; } } } else if (!strcmp(name, EL_RECORDSET)) { int i; ent.type = ST_RECORDSET; SET_STACK_VARNAME; MAKE_STD_ZVAL(ent.data); array_init(ent.data); if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], "fieldNames") && atts[i+1] && atts[i+1][0]) { zval *tmp; char *key; char *p1, *p2, *endp; i++; endp = (char *)atts[i] + strlen(atts[i]); p1 = (char *)atts[i]; while ((p2 = php_memnstr(p1, ",", sizeof(",")-1, endp)) != NULL) { key = estrndup(p1, p2 - p1); MAKE_STD_ZVAL(tmp); array_init(tmp); add_assoc_zval_ex(ent.data, key, p2 - p1 + 1, tmp); p1 = p2 + sizeof(",")-1; efree(key); } if (p1 <= endp) { MAKE_STD_ZVAL(tmp); array_init(tmp); add_assoc_zval_ex(ent.data, p1, endp - p1 + 1, tmp); } break; } } wddx_stack_push((wddx_stack *)stack, 
&ent, sizeof(st_entry)); } else if (!strcmp(name, EL_FIELD)) { int i; st_entry ent; ent.type = ST_FIELD; ent.varname = NULL; ent.data = NULL; if (atts) for (i = 0; atts[i]; i++) { if (!strcmp(atts[i], EL_NAME) && atts[i+1] && atts[i+1][0]) { st_entry *recordset; zval **field; if (wddx_stack_top(stack, (void**)&recordset) == SUCCESS && recordset->type == ST_RECORDSET && zend_hash_find(Z_ARRVAL_P(recordset->data), (char*)atts[i+1], strlen(atts[i+1])+1, (void**)&field) == SUCCESS) { ent.data = *field; } break; } } wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); } else if (!strcmp(name, EL_DATETIME)) { ent.type = ST_DATETIME; SET_STACK_VARNAME; ALLOC_ZVAL(ent.data); INIT_PZVAL(ent.data); Z_TYPE_P(ent.data) = IS_LONG; wddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry)); }
{ "deleted": [], "added": [ { "line_no": 71, "char_start": 1972, "char_end": 1983, "line": "\t\t} else {\n" }, { "line_no": 72, "char_start": 1983, "char_end": 2009, "line": "\t\t\tent.type = ST_BOOLEAN;\n" }, { "line_no": 73, "char_start": 2009, "char_end": 2031, "line": "\t\t\tSET_STACK_VARNAME;\n" }, { "line_no": 74, "char_start": 2031, "char_end": 2057, "line": "\t\t\tZVAL_FALSE(&ent.data);\n" }, { "line_no": 75, "char_start": 2057, "char_end": 2122, "line": "\t\t\twddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry));\n" } ] }
{ "deleted": [], "added": [ { "char_start": 1975, "char_end": 2125, "chars": " else {\n\t\t\tent.type = ST_BOOLEAN;\n\t\t\tSET_STACK_VARNAME;\n\t\t\tZVAL_FALSE(&ent.data);\n\t\t\twddx_stack_push((wddx_stack *)stack, &ent, sizeof(st_entry));\n\t\t}" } ] }
github.com/php/php-src/commit/66fd44209d5ffcb9b3d1bc1b9fd8e35b485040c0
ext/wddx/wddx.c
cwe-125
SMB2_negotiate
SMB2_negotiate(const unsigned int xid, struct cifs_ses *ses) { struct smb_rqst rqst; struct smb2_negotiate_req *req; struct smb2_negotiate_rsp *rsp; struct kvec iov[1]; struct kvec rsp_iov; int rc = 0; int resp_buftype; struct TCP_Server_Info *server = ses->server; int blob_offset, blob_length; char *security_blob; int flags = CIFS_NEG_OP; unsigned int total_len; cifs_dbg(FYI, "Negotiate protocol\n"); if (!server) { WARN(1, "%s: server is NULL!\n", __func__); return -EIO; } rc = smb2_plain_req_init(SMB2_NEGOTIATE, NULL, (void **) &req, &total_len); if (rc) return rc; req->sync_hdr.SessionId = 0; memset(server->preauth_sha_hash, 0, SMB2_PREAUTH_HASH_SIZE); memset(ses->preauth_sha_hash, 0, SMB2_PREAUTH_HASH_SIZE); if (strcmp(ses->server->vals->version_string, SMB3ANY_VERSION_STRING) == 0) { req->Dialects[0] = cpu_to_le16(SMB30_PROT_ID); req->Dialects[1] = cpu_to_le16(SMB302_PROT_ID); req->DialectCount = cpu_to_le16(2); total_len += 4; } else if (strcmp(ses->server->vals->version_string, SMBDEFAULT_VERSION_STRING) == 0) { req->Dialects[0] = cpu_to_le16(SMB21_PROT_ID); req->Dialects[1] = cpu_to_le16(SMB30_PROT_ID); req->Dialects[2] = cpu_to_le16(SMB302_PROT_ID); req->Dialects[3] = cpu_to_le16(SMB311_PROT_ID); req->DialectCount = cpu_to_le16(4); total_len += 8; } else { /* otherwise send specific dialect */ req->Dialects[0] = cpu_to_le16(ses->server->vals->protocol_id); req->DialectCount = cpu_to_le16(1); total_len += 2; } /* only one of SMB2 signing flags may be set in SMB2 request */ if (ses->sign) req->SecurityMode = cpu_to_le16(SMB2_NEGOTIATE_SIGNING_REQUIRED); else if (global_secflags & CIFSSEC_MAY_SIGN) req->SecurityMode = cpu_to_le16(SMB2_NEGOTIATE_SIGNING_ENABLED); else req->SecurityMode = 0; req->Capabilities = cpu_to_le32(ses->server->vals->req_capabilities); /* ClientGUID must be zero for SMB2.02 dialect */ if (ses->server->vals->protocol_id == SMB20_PROT_ID) memset(req->ClientGUID, 0, SMB2_CLIENT_GUID_SIZE); else { memcpy(req->ClientGUID, server->client_guid, SMB2_CLIENT_GUID_SIZE); if ((ses->server->vals->protocol_id == SMB311_PROT_ID) || (strcmp(ses->server->vals->version_string, SMBDEFAULT_VERSION_STRING) == 0)) assemble_neg_contexts(req, &total_len); } iov[0].iov_base = (char *)req; iov[0].iov_len = total_len; memset(&rqst, 0, sizeof(struct smb_rqst)); rqst.rq_iov = iov; rqst.rq_nvec = 1; rc = cifs_send_recv(xid, ses, &rqst, &resp_buftype, flags, &rsp_iov); cifs_small_buf_release(req); rsp = (struct smb2_negotiate_rsp *)rsp_iov.iov_base; /* * No tcon so can't do * cifs_stats_inc(&tcon->stats.smb2_stats.smb2_com_fail[SMB2...]); */ if (rc == -EOPNOTSUPP) { cifs_dbg(VFS, "Dialect not supported by server. 
Consider " "specifying vers=1.0 or vers=2.0 on mount for accessing" " older servers\n"); goto neg_exit; } else if (rc != 0) goto neg_exit; if (strcmp(ses->server->vals->version_string, SMB3ANY_VERSION_STRING) == 0) { if (rsp->DialectRevision == cpu_to_le16(SMB20_PROT_ID)) { cifs_dbg(VFS, "SMB2 dialect returned but not requested\n"); return -EIO; } else if (rsp->DialectRevision == cpu_to_le16(SMB21_PROT_ID)) { cifs_dbg(VFS, "SMB2.1 dialect returned but not requested\n"); return -EIO; } } else if (strcmp(ses->server->vals->version_string, SMBDEFAULT_VERSION_STRING) == 0) { if (rsp->DialectRevision == cpu_to_le16(SMB20_PROT_ID)) { cifs_dbg(VFS, "SMB2 dialect returned but not requested\n"); return -EIO; } else if (rsp->DialectRevision == cpu_to_le16(SMB21_PROT_ID)) { /* ops set to 3.0 by default for default so update */ ses->server->ops = &smb21_operations; } else if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID)) ses->server->ops = &smb311_operations; } else if (le16_to_cpu(rsp->DialectRevision) != ses->server->vals->protocol_id) { /* if requested single dialect ensure returned dialect matched */ cifs_dbg(VFS, "Illegal 0x%x dialect returned: not requested\n", le16_to_cpu(rsp->DialectRevision)); return -EIO; } cifs_dbg(FYI, "mode 0x%x\n", rsp->SecurityMode); if (rsp->DialectRevision == cpu_to_le16(SMB20_PROT_ID)) cifs_dbg(FYI, "negotiated smb2.0 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB21_PROT_ID)) cifs_dbg(FYI, "negotiated smb2.1 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB30_PROT_ID)) cifs_dbg(FYI, "negotiated smb3.0 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB302_PROT_ID)) cifs_dbg(FYI, "negotiated smb3.02 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID)) cifs_dbg(FYI, "negotiated smb3.1.1 dialect\n"); else { cifs_dbg(VFS, "Illegal dialect returned by server 0x%x\n", le16_to_cpu(rsp->DialectRevision)); rc = -EIO; goto neg_exit; } server->dialect = le16_to_cpu(rsp->DialectRevision); /* * Keep a copy of the hash after negprot. This hash will be * the starting hash value for all sessions made from this * server. */ memcpy(server->preauth_sha_hash, ses->preauth_sha_hash, SMB2_PREAUTH_HASH_SIZE); /* SMB2 only has an extended negflavor */ server->negflavor = CIFS_NEGFLAVOR_EXTENDED; /* set it to the maximum buffer size value we can send with 1 credit */ server->maxBuf = min_t(unsigned int, le32_to_cpu(rsp->MaxTransactSize), SMB2_MAX_BUFFER_SIZE); server->max_read = le32_to_cpu(rsp->MaxReadSize); server->max_write = le32_to_cpu(rsp->MaxWriteSize); server->sec_mode = le16_to_cpu(rsp->SecurityMode); if ((server->sec_mode & SMB2_SEC_MODE_FLAGS_ALL) != server->sec_mode) cifs_dbg(FYI, "Server returned unexpected security mode 0x%x\n", server->sec_mode); server->capabilities = le32_to_cpu(rsp->Capabilities); /* Internal types */ server->capabilities |= SMB2_NT_FIND | SMB2_LARGE_FILES; security_blob = smb2_get_data_area_len(&blob_offset, &blob_length, (struct smb2_sync_hdr *)rsp); /* * See MS-SMB2 section 2.2.4: if no blob, client picks default which * for us will be * ses->sectype = RawNTLMSSP; * but for time being this is our only auth choice so doesn't matter. * We just found a server which sets blob length to zero expecting raw. 
*/ if (blob_length == 0) { cifs_dbg(FYI, "missing security blob on negprot\n"); server->sec_ntlmssp = true; } rc = cifs_enable_signing(server, ses->sign); if (rc) goto neg_exit; if (blob_length) { rc = decode_negTokenInit(security_blob, blob_length, server); if (rc == 1) rc = 0; else if (rc == 0) rc = -EIO; } if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID)) { if (rsp->NegotiateContextCount) rc = smb311_decode_neg_context(rsp, server, rsp_iov.iov_len); else cifs_dbg(VFS, "Missing expected negotiate contexts\n"); } neg_exit: free_rsp_buf(resp_buftype, rsp); return rc; }
SMB2_negotiate(const unsigned int xid, struct cifs_ses *ses) { struct smb_rqst rqst; struct smb2_negotiate_req *req; struct smb2_negotiate_rsp *rsp; struct kvec iov[1]; struct kvec rsp_iov; int rc = 0; int resp_buftype; struct TCP_Server_Info *server = ses->server; int blob_offset, blob_length; char *security_blob; int flags = CIFS_NEG_OP; unsigned int total_len; cifs_dbg(FYI, "Negotiate protocol\n"); if (!server) { WARN(1, "%s: server is NULL!\n", __func__); return -EIO; } rc = smb2_plain_req_init(SMB2_NEGOTIATE, NULL, (void **) &req, &total_len); if (rc) return rc; req->sync_hdr.SessionId = 0; memset(server->preauth_sha_hash, 0, SMB2_PREAUTH_HASH_SIZE); memset(ses->preauth_sha_hash, 0, SMB2_PREAUTH_HASH_SIZE); if (strcmp(ses->server->vals->version_string, SMB3ANY_VERSION_STRING) == 0) { req->Dialects[0] = cpu_to_le16(SMB30_PROT_ID); req->Dialects[1] = cpu_to_le16(SMB302_PROT_ID); req->DialectCount = cpu_to_le16(2); total_len += 4; } else if (strcmp(ses->server->vals->version_string, SMBDEFAULT_VERSION_STRING) == 0) { req->Dialects[0] = cpu_to_le16(SMB21_PROT_ID); req->Dialects[1] = cpu_to_le16(SMB30_PROT_ID); req->Dialects[2] = cpu_to_le16(SMB302_PROT_ID); req->Dialects[3] = cpu_to_le16(SMB311_PROT_ID); req->DialectCount = cpu_to_le16(4); total_len += 8; } else { /* otherwise send specific dialect */ req->Dialects[0] = cpu_to_le16(ses->server->vals->protocol_id); req->DialectCount = cpu_to_le16(1); total_len += 2; } /* only one of SMB2 signing flags may be set in SMB2 request */ if (ses->sign) req->SecurityMode = cpu_to_le16(SMB2_NEGOTIATE_SIGNING_REQUIRED); else if (global_secflags & CIFSSEC_MAY_SIGN) req->SecurityMode = cpu_to_le16(SMB2_NEGOTIATE_SIGNING_ENABLED); else req->SecurityMode = 0; req->Capabilities = cpu_to_le32(ses->server->vals->req_capabilities); /* ClientGUID must be zero for SMB2.02 dialect */ if (ses->server->vals->protocol_id == SMB20_PROT_ID) memset(req->ClientGUID, 0, SMB2_CLIENT_GUID_SIZE); else { memcpy(req->ClientGUID, server->client_guid, SMB2_CLIENT_GUID_SIZE); if ((ses->server->vals->protocol_id == SMB311_PROT_ID) || (strcmp(ses->server->vals->version_string, SMBDEFAULT_VERSION_STRING) == 0)) assemble_neg_contexts(req, &total_len); } iov[0].iov_base = (char *)req; iov[0].iov_len = total_len; memset(&rqst, 0, sizeof(struct smb_rqst)); rqst.rq_iov = iov; rqst.rq_nvec = 1; rc = cifs_send_recv(xid, ses, &rqst, &resp_buftype, flags, &rsp_iov); cifs_small_buf_release(req); rsp = (struct smb2_negotiate_rsp *)rsp_iov.iov_base; /* * No tcon so can't do * cifs_stats_inc(&tcon->stats.smb2_stats.smb2_com_fail[SMB2...]); */ if (rc == -EOPNOTSUPP) { cifs_dbg(VFS, "Dialect not supported by server. 
Consider " "specifying vers=1.0 or vers=2.0 on mount for accessing" " older servers\n"); goto neg_exit; } else if (rc != 0) goto neg_exit; if (strcmp(ses->server->vals->version_string, SMB3ANY_VERSION_STRING) == 0) { if (rsp->DialectRevision == cpu_to_le16(SMB20_PROT_ID)) { cifs_dbg(VFS, "SMB2 dialect returned but not requested\n"); return -EIO; } else if (rsp->DialectRevision == cpu_to_le16(SMB21_PROT_ID)) { cifs_dbg(VFS, "SMB2.1 dialect returned but not requested\n"); return -EIO; } } else if (strcmp(ses->server->vals->version_string, SMBDEFAULT_VERSION_STRING) == 0) { if (rsp->DialectRevision == cpu_to_le16(SMB20_PROT_ID)) { cifs_dbg(VFS, "SMB2 dialect returned but not requested\n"); return -EIO; } else if (rsp->DialectRevision == cpu_to_le16(SMB21_PROT_ID)) { /* ops set to 3.0 by default for default so update */ ses->server->ops = &smb21_operations; ses->server->vals = &smb21_values; } else if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID)) { ses->server->ops = &smb311_operations; ses->server->vals = &smb311_values; } } else if (le16_to_cpu(rsp->DialectRevision) != ses->server->vals->protocol_id) { /* if requested single dialect ensure returned dialect matched */ cifs_dbg(VFS, "Illegal 0x%x dialect returned: not requested\n", le16_to_cpu(rsp->DialectRevision)); return -EIO; } cifs_dbg(FYI, "mode 0x%x\n", rsp->SecurityMode); if (rsp->DialectRevision == cpu_to_le16(SMB20_PROT_ID)) cifs_dbg(FYI, "negotiated smb2.0 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB21_PROT_ID)) cifs_dbg(FYI, "negotiated smb2.1 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB30_PROT_ID)) cifs_dbg(FYI, "negotiated smb3.0 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB302_PROT_ID)) cifs_dbg(FYI, "negotiated smb3.02 dialect\n"); else if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID)) cifs_dbg(FYI, "negotiated smb3.1.1 dialect\n"); else { cifs_dbg(VFS, "Illegal dialect returned by server 0x%x\n", le16_to_cpu(rsp->DialectRevision)); rc = -EIO; goto neg_exit; } server->dialect = le16_to_cpu(rsp->DialectRevision); /* * Keep a copy of the hash after negprot. This hash will be * the starting hash value for all sessions made from this * server. */ memcpy(server->preauth_sha_hash, ses->preauth_sha_hash, SMB2_PREAUTH_HASH_SIZE); /* SMB2 only has an extended negflavor */ server->negflavor = CIFS_NEGFLAVOR_EXTENDED; /* set it to the maximum buffer size value we can send with 1 credit */ server->maxBuf = min_t(unsigned int, le32_to_cpu(rsp->MaxTransactSize), SMB2_MAX_BUFFER_SIZE); server->max_read = le32_to_cpu(rsp->MaxReadSize); server->max_write = le32_to_cpu(rsp->MaxWriteSize); server->sec_mode = le16_to_cpu(rsp->SecurityMode); if ((server->sec_mode & SMB2_SEC_MODE_FLAGS_ALL) != server->sec_mode) cifs_dbg(FYI, "Server returned unexpected security mode 0x%x\n", server->sec_mode); server->capabilities = le32_to_cpu(rsp->Capabilities); /* Internal types */ server->capabilities |= SMB2_NT_FIND | SMB2_LARGE_FILES; security_blob = smb2_get_data_area_len(&blob_offset, &blob_length, (struct smb2_sync_hdr *)rsp); /* * See MS-SMB2 section 2.2.4: if no blob, client picks default which * for us will be * ses->sectype = RawNTLMSSP; * but for time being this is our only auth choice so doesn't matter. * We just found a server which sets blob length to zero expecting raw. 
*/ if (blob_length == 0) { cifs_dbg(FYI, "missing security blob on negprot\n"); server->sec_ntlmssp = true; } rc = cifs_enable_signing(server, ses->sign); if (rc) goto neg_exit; if (blob_length) { rc = decode_negTokenInit(security_blob, blob_length, server); if (rc == 1) rc = 0; else if (rc == 0) rc = -EIO; } if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID)) { if (rsp->NegotiateContextCount) rc = smb311_decode_neg_context(rsp, server, rsp_iov.iov_len); else cifs_dbg(VFS, "Missing expected negotiate contexts\n"); } neg_exit: free_rsp_buf(resp_buftype, rsp); return rc; }
{ "deleted": [ { "line_no": 116, "char_start": 3733, "char_end": 3799, "line": "\t\t} else if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID))\n" } ], "added": [ { "line_no": 116, "char_start": 3733, "char_end": 3771, "line": "\t\t\tses->server->vals = &smb21_values;\n" }, { "line_no": 117, "char_start": 3771, "char_end": 3839, "line": "\t\t} else if (rsp->DialectRevision == cpu_to_le16(SMB311_PROT_ID)) {\n" }, { "line_no": 119, "char_start": 3881, "char_end": 3920, "line": "\t\t\tses->server->vals = &smb311_values;\n" }, { "line_no": 120, "char_start": 3920, "char_end": 3924, "line": "\t\t}\n" } ] }
{ "deleted": [], "added": [ { "char_start": 3735, "char_end": 3773, "chars": "\tses->server->vals = &smb21_values;\n\t\t" }, { "char_start": 3836, "char_end": 3838, "chars": " {" }, { "char_start": 3880, "char_end": 3923, "chars": "\n\t\t\tses->server->vals = &smb311_values;\n\t\t}" } ] }
github.com/torvalds/linux/commit/b57a55e2200ede754e4dc9cce4ba9402544b9365
fs/cifs/smb2pdu.c
cwe-125
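The visible diff in this record only adds the two ses->server->vals assignments and the braces around the SMB 3.1.1 branch, so that the values table is switched together with the operations table when the server answers a multi-dialect negotiate with a dialect other than the mounted default. Below is a minimal sketch of that pattern; the proto_ops/proto_vals types, table names, and session struct are stand-ins for illustration, not the kernel's cifs structures.

/* Sketch only: hypothetical proto_ops / proto_vals tables standing in for
 * the kernel's smb_version_operations / smb_version_values. The point is
 * the one visible in the diff above: when the negotiated dialect is not
 * the mounted default, switch the vals table together with the ops table. */
#include <stdint.h>
#include <stdio.h>

struct proto_ops  { const char *name; };                    /* stand-in */
struct proto_vals { const char *name; unsigned max_hdr; };  /* stand-in */

static const struct proto_ops  ops_21   = { "smb2.1 ops" };
static const struct proto_vals vals_21  = { "smb2.1 vals", 64 };
static const struct proto_ops  ops_311  = { "smb3.1.1 ops" };
static const struct proto_vals vals_311 = { "smb3.1.1 vals", 96 };

struct session {
    const struct proto_ops  *ops;
    const struct proto_vals *vals;
};

/* Pick both tables from the dialect the server actually returned. */
static int apply_dialect(struct session *ses, uint16_t dialect)
{
    switch (dialect) {
    case 0x0210:                  /* SMB 2.1 */
        ses->ops  = &ops_21;
        ses->vals = &vals_21;     /* keep vals in step with ops */
        return 0;
    case 0x0311:                  /* SMB 3.1.1 */
        ses->ops  = &ops_311;
        ses->vals = &vals_311;
        return 0;
    default:
        return -1;                /* dialect we did not offer */
    }
}

int main(void)
{
    struct session ses = { 0 };

    if (apply_dialect(&ses, 0x0311) == 0)
        printf("%s / %s, max_hdr=%u\n",
               ses.ops->name, ses.vals->name, ses.vals->max_hdr);
    return 0;
}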
parse8BIM
static ssize_t parse8BIM(Image *ifile, Image *ofile) { char brkused, quoted, *line, *token, *newstr, *name; int state, next; unsigned char dataset; unsigned int recnum; int inputlen = MaxTextExtent; MagickOffsetType savedpos, currentpos; ssize_t savedolen = 0L, outputlen = 0L; TokenInfo *token_info; dataset = 0; recnum = 0; line = (char *) AcquireQuantumMemory((size_t) inputlen,sizeof(*line)); if (line == (char *) NULL) return(-1); newstr = name = token = (char *) NULL; savedpos = 0; token_info=AcquireTokenInfo(); while (super_fgets(&line,&inputlen,ifile)!=NULL) { state=0; next=0; token=(char *) AcquireQuantumMemory((size_t) inputlen,sizeof(*token)); if (token == (char *) NULL) break; newstr=(char *) AcquireQuantumMemory((size_t) inputlen,sizeof(*newstr)); if (newstr == (char *) NULL) break; while (Tokenizer(token_info,0,token,(size_t) inputlen,line,"","=","\"",0, &brkused,&next,&quoted)==0) { if (state == 0) { int state, next; char brkused, quoted; state=0; next=0; while (Tokenizer(token_info,0,newstr,(size_t) inputlen,token,"","#", "", 0,&brkused,&next,&quoted)==0) { switch (state) { case 0: if (strcmp(newstr,"8BIM")==0) dataset = 255; else dataset = (unsigned char) StringToLong(newstr); break; case 1: recnum = (unsigned int) StringToUnsignedLong(newstr); break; case 2: name=(char *) AcquireQuantumMemory(strlen(newstr)+MaxTextExtent, sizeof(*name)); if (name) (void) strcpy(name,newstr); break; } state++; } } else if (state == 1) { int next; ssize_t len; char brkused, quoted; next=0; len = (ssize_t) strlen(token); while (Tokenizer(token_info,0,newstr,(size_t) inputlen,token,"","&", "",0,&brkused,&next,&quoted)==0) { if (brkused && next > 0) { char *s = &token[next-1]; len -= (ssize_t) convertHTMLcodes(s,(int) strlen(s)); } } if (dataset == 255) { unsigned char nlen = 0; int i; if (savedolen > 0) { MagickOffsetType offset; ssize_t diff = outputlen - savedolen; currentpos = TellBlob(ofile); if (currentpos < 0) return(-1); offset=SeekBlob(ofile,savedpos,SEEK_SET); if (offset < 0) return(-1); (void) WriteBlobMSBLong(ofile,(unsigned int) diff); offset=SeekBlob(ofile,currentpos,SEEK_SET); if (offset < 0) return(-1); savedolen = 0L; } if (outputlen & 1) { (void) WriteBlobByte(ofile,0x00); outputlen++; } (void) WriteBlobString(ofile,"8BIM"); (void) WriteBlobMSBShort(ofile,(unsigned short) recnum); outputlen += 6; if (name) nlen = (unsigned char) strlen(name); (void) WriteBlobByte(ofile,nlen); outputlen++; for (i=0; i<nlen; i++) (void) WriteBlobByte(ofile,(unsigned char) name[i]); outputlen += nlen; if ((nlen & 0x01) == 0) { (void) WriteBlobByte(ofile,0x00); outputlen++; } if (recnum != IPTC_ID) { (void) WriteBlobMSBLong(ofile, (unsigned int) len); outputlen += 4; next=0; outputlen += len; while (len--) (void) WriteBlobByte(ofile,(unsigned char) token[next++]); if (outputlen & 1) { (void) WriteBlobByte(ofile,0x00); outputlen++; } } else { /* patch in a fake length for now and fix it later */ savedpos = TellBlob(ofile); if (savedpos < 0) return(-1); (void) WriteBlobMSBLong(ofile,0xFFFFFFFFU); outputlen += 4; savedolen = outputlen; } } else { if (len <= 0x7FFF) { (void) WriteBlobByte(ofile,0x1c); (void) WriteBlobByte(ofile,(unsigned char) dataset); (void) WriteBlobByte(ofile,(unsigned char) (recnum & 0xff)); (void) WriteBlobMSBShort(ofile,(unsigned short) len); outputlen += 5; next=0; outputlen += len; while (len--) (void) WriteBlobByte(ofile,(unsigned char) token[next++]); } } } state++; } if (token != (char *) NULL) token=DestroyString(token); if (newstr != (char *) NULL) 
newstr=DestroyString(newstr); if (name != (char *) NULL) name=DestroyString(name); } token_info=DestroyTokenInfo(token_info); if (token != (char *) NULL) token=DestroyString(token); if (newstr != (char *) NULL) newstr=DestroyString(newstr); if (name != (char *) NULL) name=DestroyString(name); line=DestroyString(line); if (savedolen > 0) { MagickOffsetType offset; ssize_t diff = outputlen - savedolen; currentpos = TellBlob(ofile); if (currentpos < 0) return(-1); offset=SeekBlob(ofile,savedpos,SEEK_SET); if (offset < 0) return(-1); (void) WriteBlobMSBLong(ofile,(unsigned int) diff); offset=SeekBlob(ofile,currentpos,SEEK_SET); if (offset < 0) return(-1); savedolen = 0L; } return(outputlen); }
static ssize_t parse8BIM(Image *ifile, Image *ofile) { char brkused, quoted, *line, *token, *newstr, *name; int state, next; unsigned char dataset; unsigned int recnum; int inputlen = MaxTextExtent; MagickOffsetType savedpos, currentpos; ssize_t savedolen = 0L, outputlen = 0L; TokenInfo *token_info; dataset = 0; recnum = 0; line = (char *) AcquireQuantumMemory((size_t) inputlen,sizeof(*line)); if (line == (char *) NULL) return(-1); newstr = name = token = (char *) NULL; savedpos = 0; token_info=AcquireTokenInfo(); while (super_fgets(&line,&inputlen,ifile)!=NULL) { state=0; next=0; token=(char *) AcquireQuantumMemory((size_t) inputlen,sizeof(*token)); if (token == (char *) NULL) break; newstr=(char *) AcquireQuantumMemory((size_t) inputlen,sizeof(*newstr)); if (newstr == (char *) NULL) break; while (Tokenizer(token_info,0,token,(size_t) inputlen,line,"","=","\"",0, &brkused,&next,&quoted)==0) { if (state == 0) { int state, next; char brkused, quoted; state=0; next=0; while (Tokenizer(token_info,0,newstr,(size_t) inputlen,token,"","#", "", 0,&brkused,&next,&quoted)==0) { switch (state) { case 0: if (strcmp(newstr,"8BIM")==0) dataset = 255; else dataset = (unsigned char) StringToLong(newstr); break; case 1: recnum = (unsigned int) StringToUnsignedLong(newstr); break; case 2: name=(char *) AcquireQuantumMemory(strlen(newstr)+MaxTextExtent, sizeof(*name)); if (name) (void) strcpy(name,newstr); break; } state++; } } else if (state == 1) { int next; ssize_t len; char brkused, quoted; next=0; len = (ssize_t) strlen(token); while (Tokenizer(token_info,0,newstr,(size_t) inputlen,token,"","&", "",0,&brkused,&next,&quoted)==0) { if (brkused && next > 0) { char *s = &token[next-1]; len -= (ssize_t) convertHTMLcodes(s,(int) strlen(s)); } } if (dataset == 255) { unsigned char nlen = 0; int i; if (savedolen > 0) { MagickOffsetType offset; ssize_t diff = outputlen - savedolen; currentpos = TellBlob(ofile); if (currentpos < 0) return(-1); offset=SeekBlob(ofile,savedpos,SEEK_SET); if (offset < 0) return(-1); (void) WriteBlobMSBLong(ofile,(unsigned int) diff); offset=SeekBlob(ofile,currentpos,SEEK_SET); if (offset < 0) return(-1); savedolen = 0L; } if (outputlen & 1) { (void) WriteBlobByte(ofile,0x00); outputlen++; } (void) WriteBlobString(ofile,"8BIM"); (void) WriteBlobMSBShort(ofile,(unsigned short) recnum); outputlen += 6; if (name) nlen = (unsigned char) strlen(name); (void) WriteBlobByte(ofile,nlen); outputlen++; for (i=0; i<nlen; i++) (void) WriteBlobByte(ofile,(unsigned char) name[i]); outputlen += nlen; if ((nlen & 0x01) == 0) { (void) WriteBlobByte(ofile,0x00); outputlen++; } if (recnum != IPTC_ID) { (void) WriteBlobMSBLong(ofile, (unsigned int) len); outputlen += 4; next=0; outputlen += len; while (len-- > 0) (void) WriteBlobByte(ofile,(unsigned char) token[next++]); if (outputlen & 1) { (void) WriteBlobByte(ofile,0x00); outputlen++; } } else { /* patch in a fake length for now and fix it later */ savedpos = TellBlob(ofile); if (savedpos < 0) return(-1); (void) WriteBlobMSBLong(ofile,0xFFFFFFFFU); outputlen += 4; savedolen = outputlen; } } else { if (len <= 0x7FFF) { (void) WriteBlobByte(ofile,0x1c); (void) WriteBlobByte(ofile,(unsigned char) dataset); (void) WriteBlobByte(ofile,(unsigned char) (recnum & 0xff)); (void) WriteBlobMSBShort(ofile,(unsigned short) len); outputlen += 5; next=0; outputlen += len; while (len-- > 0) (void) WriteBlobByte(ofile,(unsigned char) token[next++]); } } } state++; } if (token != (char *) NULL) token=DestroyString(token); if (newstr != (char *) NULL) 
newstr=DestroyString(newstr); if (name != (char *) NULL) name=DestroyString(name); } token_info=DestroyTokenInfo(token_info); if (token != (char *) NULL) token=DestroyString(token); if (newstr != (char *) NULL) newstr=DestroyString(newstr); if (name != (char *) NULL) name=DestroyString(name); line=DestroyString(line); if (savedolen > 0) { MagickOffsetType offset; ssize_t diff = outputlen - savedolen; currentpos = TellBlob(ofile); if (currentpos < 0) return(-1); offset=SeekBlob(ofile,savedpos,SEEK_SET); if (offset < 0) return(-1); (void) WriteBlobMSBLong(ofile,(unsigned int) diff); offset=SeekBlob(ofile,currentpos,SEEK_SET); if (offset < 0) return(-1); savedolen = 0L; } return(outputlen); }
{ "deleted": [ { "line_no": 173, "char_start": 4538, "char_end": 4572, "line": " while (len--)\n" }, { "line_no": 204, "char_start": 5742, "char_end": 5776, "line": " while (len--)\n" } ], "added": [ { "line_no": 173, "char_start": 4538, "char_end": 4576, "line": " while (len-- > 0)\n" }, { "line_no": 204, "char_start": 5746, "char_end": 5784, "line": " while (len-- > 0)\n" } ] }
{ "deleted": [], "added": [ { "char_start": 4570, "char_end": 4574, "chars": " > 0" }, { "char_start": 5778, "char_end": 5782, "chars": " > 0" } ] }
github.com/ImageMagick/ImageMagick/commit/97c9f438a9b3454d085895f4d1f66389fd22a0fb
coders/meta.c
cwe-125
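Both hunks in this record make the same change: while (len--) becomes while (len-- > 0). len is a signed ssize_t that the surrounding code decrements by the return value of convertHTMLcodes(), so it can reach the copy loop already negative, and a plain len-- test only stops at exactly zero. A short sketch of the guarded form follows; copy_bytes and the buffer names are hypothetical, not the ImageMagick variables.

/* Sketch only: why the `> 0` matters for a signed counter. With a plain
 * `while (len--)`, a len that is already negative is still nonzero, so the
 * loop keeps running and src[next++] walks past the end of the buffer.
 * Testing `len-- > 0` makes a non-positive count a no-op. */
#include <stdio.h>
#include <sys/types.h>   /* ssize_t (POSIX) */

static size_t copy_bytes(char *dst, const char *src, ssize_t len)
{
    size_t next = 0;

    while (len-- > 0) {           /* a non-positive count copies nothing */
        dst[next] = src[next];
        next++;
    }
    return next;                  /* number of bytes actually copied */
}

int main(void)
{
    char out[8] = { 0 };

    printf("%zu\n", copy_bytes(out, "abc", 3));    /* 3 */
    printf("%zu\n", copy_bytes(out, "abc", -2));   /* 0 -- not a runaway read */
    return 0;
}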
ParseDsdiffHeaderConfig
int ParseDsdiffHeaderConfig (FILE *infile, char *infilename, char *fourcc, WavpackContext *wpc, WavpackConfig *config) { int64_t infilesize, total_samples; DFFFileHeader dff_file_header; DFFChunkHeader dff_chunk_header; uint32_t bcount; infilesize = DoGetFileSize (infile); memcpy (&dff_file_header, fourcc, 4); if ((!DoReadFile (infile, ((char *) &dff_file_header) + 4, sizeof (DFFFileHeader) - 4, &bcount) || bcount != sizeof (DFFFileHeader) - 4) || strncmp (dff_file_header.formType, "DSD ", 4)) { error_line ("%s is not a valid .DFF file!", infilename); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, &dff_file_header, sizeof (DFFFileHeader))) { error_line ("%s", WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } #if 1 // this might be a little too picky... WavpackBigEndianToNative (&dff_file_header, DFFFileHeaderFormat); if (infilesize && !(config->qmode & QMODE_IGNORE_LENGTH) && dff_file_header.ckDataSize && dff_file_header.ckDataSize + 1 && dff_file_header.ckDataSize + 12 != infilesize) { error_line ("%s is not a valid .DFF file (by total size)!", infilename); return WAVPACK_SOFT_ERROR; } if (debug_logging_mode) error_line ("file header indicated length = %lld", dff_file_header.ckDataSize); #endif // loop through all elements of the DSDIFF header // (until the data chuck) and copy them to the output file while (1) { if (!DoReadFile (infile, &dff_chunk_header, sizeof (DFFChunkHeader), &bcount) || bcount != sizeof (DFFChunkHeader)) { error_line ("%s is not a valid .DFF file!", infilename); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, &dff_chunk_header, sizeof (DFFChunkHeader))) { error_line ("%s", WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } WavpackBigEndianToNative (&dff_chunk_header, DFFChunkHeaderFormat); if (debug_logging_mode) error_line ("chunk header indicated length = %lld", dff_chunk_header.ckDataSize); if (!strncmp (dff_chunk_header.ckID, "FVER", 4)) { uint32_t version; if (dff_chunk_header.ckDataSize != sizeof (version) || !DoReadFile (infile, &version, sizeof (version), &bcount) || bcount != sizeof (version)) { error_line ("%s is not a valid .DFF file!", infilename); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, &version, sizeof (version))) { error_line ("%s", WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } WavpackBigEndianToNative (&version, "L"); if (debug_logging_mode) error_line ("dsdiff file version = 0x%08x", version); } else if (!strncmp (dff_chunk_header.ckID, "PROP", 4)) { char *prop_chunk = malloc ((size_t) dff_chunk_header.ckDataSize); if (!DoReadFile (infile, prop_chunk, (uint32_t) dff_chunk_header.ckDataSize, &bcount) || bcount != dff_chunk_header.ckDataSize) { error_line ("%s is not a valid .DFF file!", infilename); free (prop_chunk); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, prop_chunk, (uint32_t) dff_chunk_header.ckDataSize)) { error_line ("%s", WavpackGetErrorMessage (wpc)); free (prop_chunk); return WAVPACK_SOFT_ERROR; } if (!strncmp (prop_chunk, "SND ", 4)) { char *cptr = prop_chunk + 4, *eptr = prop_chunk + dff_chunk_header.ckDataSize; uint16_t numChannels, chansSpecified, chanMask = 0; uint32_t sampleRate; while (eptr - cptr >= sizeof (dff_chunk_header)) { memcpy (&dff_chunk_header, cptr, sizeof (dff_chunk_header)); cptr += sizeof (dff_chunk_header); WavpackBigEndianToNative 
(&dff_chunk_header, DFFChunkHeaderFormat); if (eptr - cptr >= dff_chunk_header.ckDataSize) { if (!strncmp (dff_chunk_header.ckID, "FS ", 4) && dff_chunk_header.ckDataSize == 4) { memcpy (&sampleRate, cptr, sizeof (sampleRate)); WavpackBigEndianToNative (&sampleRate, "L"); cptr += dff_chunk_header.ckDataSize; if (debug_logging_mode) error_line ("got sample rate of %u Hz", sampleRate); } else if (!strncmp (dff_chunk_header.ckID, "CHNL", 4) && dff_chunk_header.ckDataSize >= 2) { memcpy (&numChannels, cptr, sizeof (numChannels)); WavpackBigEndianToNative (&numChannels, "S"); cptr += sizeof (numChannels); chansSpecified = (int)(dff_chunk_header.ckDataSize - sizeof (numChannels)) / 4; while (chansSpecified--) { if (!strncmp (cptr, "SLFT", 4) || !strncmp (cptr, "MLFT", 4)) chanMask |= 0x1; else if (!strncmp (cptr, "SRGT", 4) || !strncmp (cptr, "MRGT", 4)) chanMask |= 0x2; else if (!strncmp (cptr, "LS ", 4)) chanMask |= 0x10; else if (!strncmp (cptr, "RS ", 4)) chanMask |= 0x20; else if (!strncmp (cptr, "C ", 4)) chanMask |= 0x4; else if (!strncmp (cptr, "LFE ", 4)) chanMask |= 0x8; else if (debug_logging_mode) error_line ("undefined channel ID %c%c%c%c", cptr [0], cptr [1], cptr [2], cptr [3]); cptr += 4; } if (debug_logging_mode) error_line ("%d channels, mask = 0x%08x", numChannels, chanMask); } else if (!strncmp (dff_chunk_header.ckID, "CMPR", 4) && dff_chunk_header.ckDataSize >= 4) { if (strncmp (cptr, "DSD ", 4)) { error_line ("DSDIFF files must be uncompressed, not \"%c%c%c%c\"!", cptr [0], cptr [1], cptr [2], cptr [3]); free (prop_chunk); return WAVPACK_SOFT_ERROR; } cptr += dff_chunk_header.ckDataSize; } else { if (debug_logging_mode) error_line ("got PROP/SND chunk type \"%c%c%c%c\" of %d bytes", dff_chunk_header.ckID [0], dff_chunk_header.ckID [1], dff_chunk_header.ckID [2], dff_chunk_header.ckID [3], dff_chunk_header.ckDataSize); cptr += dff_chunk_header.ckDataSize; } } else { error_line ("%s is not a valid .DFF file!", infilename); free (prop_chunk); return WAVPACK_SOFT_ERROR; } } if (chanMask && (config->channel_mask || (config->qmode & QMODE_CHANS_UNASSIGNED))) { error_line ("this DSDIFF file already has channel order information!"); free (prop_chunk); return WAVPACK_SOFT_ERROR; } else if (chanMask) config->channel_mask = chanMask; config->bits_per_sample = 8; config->bytes_per_sample = 1; config->num_channels = numChannels; config->sample_rate = sampleRate / 8; config->qmode |= QMODE_DSD_MSB_FIRST; } else if (debug_logging_mode) error_line ("got unknown PROP chunk type \"%c%c%c%c\" of %d bytes", prop_chunk [0], prop_chunk [1], prop_chunk [2], prop_chunk [3], dff_chunk_header.ckDataSize); free (prop_chunk); } else if (!strncmp (dff_chunk_header.ckID, "DSD ", 4)) { total_samples = dff_chunk_header.ckDataSize / config->num_channels; break; } else { // just copy unknown chunks to output file int bytes_to_copy = (int)(((dff_chunk_header.ckDataSize) + 1) & ~(int64_t)1); char *buff = malloc (bytes_to_copy); if (debug_logging_mode) error_line ("extra unknown chunk \"%c%c%c%c\" of %d bytes", dff_chunk_header.ckID [0], dff_chunk_header.ckID [1], dff_chunk_header.ckID [2], dff_chunk_header.ckID [3], dff_chunk_header.ckDataSize); if (!DoReadFile (infile, buff, bytes_to_copy, &bcount) || bcount != bytes_to_copy || (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, buff, bytes_to_copy))) { error_line ("%s", WavpackGetErrorMessage (wpc)); free (buff); return WAVPACK_SOFT_ERROR; } free (buff); } } if (debug_logging_mode) error_line ("setting configuration with %lld samples", 
total_samples); if (!WavpackSetConfiguration64 (wpc, config, total_samples, NULL)) { error_line ("%s: %s", infilename, WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } return WAVPACK_NO_ERROR; }
int ParseDsdiffHeaderConfig (FILE *infile, char *infilename, char *fourcc, WavpackContext *wpc, WavpackConfig *config) { int64_t infilesize, total_samples; DFFFileHeader dff_file_header; DFFChunkHeader dff_chunk_header; uint32_t bcount; infilesize = DoGetFileSize (infile); memcpy (&dff_file_header, fourcc, 4); if ((!DoReadFile (infile, ((char *) &dff_file_header) + 4, sizeof (DFFFileHeader) - 4, &bcount) || bcount != sizeof (DFFFileHeader) - 4) || strncmp (dff_file_header.formType, "DSD ", 4)) { error_line ("%s is not a valid .DFF file!", infilename); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, &dff_file_header, sizeof (DFFFileHeader))) { error_line ("%s", WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } #if 1 // this might be a little too picky... WavpackBigEndianToNative (&dff_file_header, DFFFileHeaderFormat); if (infilesize && !(config->qmode & QMODE_IGNORE_LENGTH) && dff_file_header.ckDataSize && dff_file_header.ckDataSize + 1 && dff_file_header.ckDataSize + 12 != infilesize) { error_line ("%s is not a valid .DFF file (by total size)!", infilename); return WAVPACK_SOFT_ERROR; } if (debug_logging_mode) error_line ("file header indicated length = %lld", dff_file_header.ckDataSize); #endif // loop through all elements of the DSDIFF header // (until the data chuck) and copy them to the output file while (1) { if (!DoReadFile (infile, &dff_chunk_header, sizeof (DFFChunkHeader), &bcount) || bcount != sizeof (DFFChunkHeader)) { error_line ("%s is not a valid .DFF file!", infilename); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, &dff_chunk_header, sizeof (DFFChunkHeader))) { error_line ("%s", WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } WavpackBigEndianToNative (&dff_chunk_header, DFFChunkHeaderFormat); if (debug_logging_mode) error_line ("chunk header indicated length = %lld", dff_chunk_header.ckDataSize); if (!strncmp (dff_chunk_header.ckID, "FVER", 4)) { uint32_t version; if (dff_chunk_header.ckDataSize != sizeof (version) || !DoReadFile (infile, &version, sizeof (version), &bcount) || bcount != sizeof (version)) { error_line ("%s is not a valid .DFF file!", infilename); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, &version, sizeof (version))) { error_line ("%s", WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } WavpackBigEndianToNative (&version, "L"); if (debug_logging_mode) error_line ("dsdiff file version = 0x%08x", version); } else if (!strncmp (dff_chunk_header.ckID, "PROP", 4)) { char *prop_chunk; if (dff_chunk_header.ckDataSize < 4 || dff_chunk_header.ckDataSize > 1024) { error_line ("%s is not a valid .DFF file!", infilename); return WAVPACK_SOFT_ERROR; } if (debug_logging_mode) error_line ("got PROP chunk of %d bytes total", (int) dff_chunk_header.ckDataSize); prop_chunk = malloc ((size_t) dff_chunk_header.ckDataSize); if (!DoReadFile (infile, prop_chunk, (uint32_t) dff_chunk_header.ckDataSize, &bcount) || bcount != dff_chunk_header.ckDataSize) { error_line ("%s is not a valid .DFF file!", infilename); free (prop_chunk); return WAVPACK_SOFT_ERROR; } else if (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, prop_chunk, (uint32_t) dff_chunk_header.ckDataSize)) { error_line ("%s", WavpackGetErrorMessage (wpc)); free (prop_chunk); return WAVPACK_SOFT_ERROR; } if (!strncmp (prop_chunk, "SND ", 4)) { char *cptr = prop_chunk + 4, *eptr = 
prop_chunk + dff_chunk_header.ckDataSize; uint16_t numChannels, chansSpecified, chanMask = 0; uint32_t sampleRate; while (eptr - cptr >= sizeof (dff_chunk_header)) { memcpy (&dff_chunk_header, cptr, sizeof (dff_chunk_header)); cptr += sizeof (dff_chunk_header); WavpackBigEndianToNative (&dff_chunk_header, DFFChunkHeaderFormat); if (eptr - cptr >= dff_chunk_header.ckDataSize) { if (!strncmp (dff_chunk_header.ckID, "FS ", 4) && dff_chunk_header.ckDataSize == 4) { memcpy (&sampleRate, cptr, sizeof (sampleRate)); WavpackBigEndianToNative (&sampleRate, "L"); cptr += dff_chunk_header.ckDataSize; if (debug_logging_mode) error_line ("got sample rate of %u Hz", sampleRate); } else if (!strncmp (dff_chunk_header.ckID, "CHNL", 4) && dff_chunk_header.ckDataSize >= 2) { memcpy (&numChannels, cptr, sizeof (numChannels)); WavpackBigEndianToNative (&numChannels, "S"); cptr += sizeof (numChannels); chansSpecified = (int)(dff_chunk_header.ckDataSize - sizeof (numChannels)) / 4; while (chansSpecified--) { if (!strncmp (cptr, "SLFT", 4) || !strncmp (cptr, "MLFT", 4)) chanMask |= 0x1; else if (!strncmp (cptr, "SRGT", 4) || !strncmp (cptr, "MRGT", 4)) chanMask |= 0x2; else if (!strncmp (cptr, "LS ", 4)) chanMask |= 0x10; else if (!strncmp (cptr, "RS ", 4)) chanMask |= 0x20; else if (!strncmp (cptr, "C ", 4)) chanMask |= 0x4; else if (!strncmp (cptr, "LFE ", 4)) chanMask |= 0x8; else if (debug_logging_mode) error_line ("undefined channel ID %c%c%c%c", cptr [0], cptr [1], cptr [2], cptr [3]); cptr += 4; } if (debug_logging_mode) error_line ("%d channels, mask = 0x%08x", numChannels, chanMask); } else if (!strncmp (dff_chunk_header.ckID, "CMPR", 4) && dff_chunk_header.ckDataSize >= 4) { if (strncmp (cptr, "DSD ", 4)) { error_line ("DSDIFF files must be uncompressed, not \"%c%c%c%c\"!", cptr [0], cptr [1], cptr [2], cptr [3]); free (prop_chunk); return WAVPACK_SOFT_ERROR; } cptr += dff_chunk_header.ckDataSize; } else { if (debug_logging_mode) error_line ("got PROP/SND chunk type \"%c%c%c%c\" of %d bytes", dff_chunk_header.ckID [0], dff_chunk_header.ckID [1], dff_chunk_header.ckID [2], dff_chunk_header.ckID [3], dff_chunk_header.ckDataSize); cptr += dff_chunk_header.ckDataSize; } } else { error_line ("%s is not a valid .DFF file!", infilename); free (prop_chunk); return WAVPACK_SOFT_ERROR; } } if (chanMask && (config->channel_mask || (config->qmode & QMODE_CHANS_UNASSIGNED))) { error_line ("this DSDIFF file already has channel order information!"); free (prop_chunk); return WAVPACK_SOFT_ERROR; } else if (chanMask) config->channel_mask = chanMask; config->bits_per_sample = 8; config->bytes_per_sample = 1; config->num_channels = numChannels; config->sample_rate = sampleRate / 8; config->qmode |= QMODE_DSD_MSB_FIRST; } else if (debug_logging_mode) error_line ("got unknown PROP chunk type \"%c%c%c%c\" of %d bytes", prop_chunk [0], prop_chunk [1], prop_chunk [2], prop_chunk [3], dff_chunk_header.ckDataSize); free (prop_chunk); } else if (!strncmp (dff_chunk_header.ckID, "DSD ", 4)) { total_samples = dff_chunk_header.ckDataSize / config->num_channels; break; } else { // just copy unknown chunks to output file int bytes_to_copy = (int)(((dff_chunk_header.ckDataSize) + 1) & ~(int64_t)1); char *buff = malloc (bytes_to_copy); if (debug_logging_mode) error_line ("extra unknown chunk \"%c%c%c%c\" of %d bytes", dff_chunk_header.ckID [0], dff_chunk_header.ckID [1], dff_chunk_header.ckID [2], dff_chunk_header.ckID [3], dff_chunk_header.ckDataSize); if (!DoReadFile (infile, buff, bytes_to_copy, &bcount) || bcount != bytes_to_copy 
|| (!(config->qmode & QMODE_NO_STORE_WRAPPER) && !WavpackAddWrapper (wpc, buff, bytes_to_copy))) { error_line ("%s", WavpackGetErrorMessage (wpc)); free (buff); return WAVPACK_SOFT_ERROR; } free (buff); } } if (debug_logging_mode) error_line ("setting configuration with %lld samples", total_samples); if (!WavpackSetConfiguration64 (wpc, config, total_samples, NULL)) { error_line ("%s: %s", infilename, WavpackGetErrorMessage (wpc)); return WAVPACK_SOFT_ERROR; } return WAVPACK_NO_ERROR; }
{ "deleted": [ { "line_no": 77, "char_start": 3244, "char_end": 3322, "line": " char *prop_chunk = malloc ((size_t) dff_chunk_header.ckDataSize);\n" } ], "added": [ { "line_no": 77, "char_start": 3244, "char_end": 3274, "line": " char *prop_chunk;\n" }, { "line_no": 78, "char_start": 3274, "char_end": 3275, "line": "\n" }, { "line_no": 79, "char_start": 3275, "char_end": 3364, "line": " if (dff_chunk_header.ckDataSize < 4 || dff_chunk_header.ckDataSize > 1024) {\n" }, { "line_no": 80, "char_start": 3364, "char_end": 3437, "line": " error_line (\"%s is not a valid .DFF file!\", infilename);\n" }, { "line_no": 81, "char_start": 3437, "char_end": 3480, "line": " return WAVPACK_SOFT_ERROR;\n" }, { "line_no": 82, "char_start": 3480, "char_end": 3494, "line": " }\n" }, { "line_no": 83, "char_start": 3494, "char_end": 3495, "line": "\n" }, { "line_no": 84, "char_start": 3495, "char_end": 3531, "line": " if (debug_logging_mode)\n" }, { "line_no": 85, "char_start": 3531, "char_end": 3631, "line": " error_line (\"got PROP chunk of %d bytes total\", (int) dff_chunk_header.ckDataSize);\n" }, { "line_no": 86, "char_start": 3631, "char_end": 3632, "line": "\n" }, { "line_no": 87, "char_start": 3632, "char_end": 3704, "line": " prop_chunk = malloc ((size_t) dff_chunk_header.ckDataSize);\n" } ] }
{ "deleted": [], "added": [ { "char_start": 3272, "char_end": 3654, "chars": ";\n\n if (dff_chunk_header.ckDataSize < 4 || dff_chunk_header.ckDataSize > 1024) {\n error_line (\"%s is not a valid .DFF file!\", infilename);\n return WAVPACK_SOFT_ERROR;\n }\n\n if (debug_logging_mode)\n error_line (\"got PROP chunk of %d bytes total\", (int) dff_chunk_header.ckDataSize);\n\n prop_chunk" } ] }
github.com/dbry/WavPack/commit/36a24c7881427d2e1e4dc1cef58f19eee0d13aec
cli/dsdiff.c
cwe-125
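The block added in this record validates the PROP chunk's ckDataSize (at least 4 bytes for the "SND " tag the parser compares against, at most 1024) before the size ever reaches malloc() and DoReadFile(). A sketch of that validate-then-allocate shape follows; read_exact(), parse_prop_chunk(), and the surrounding structure are hypothetical stand-ins rather than WavPack's helpers, while the 4/1024 bounds mirror the ones in the diff above.

/* Sketch only: bound an untrusted on-disk size before trusting it. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MIN_PROP 4        /* need at least the 4-byte "SND " tag */
#define MAX_PROP 1024     /* sanity cap, same bounds as the fix above */

static int read_exact(FILE *f, void *buf, size_t n)
{
    return fread(buf, 1, n, f) == n ? 0 : -1;
}

static int parse_prop_chunk(FILE *f, uint64_t ck_size)
{
    char *chunk;

    if (ck_size < MIN_PROP || ck_size > MAX_PROP)
        return -1;                        /* reject before malloc/read */

    chunk = malloc((size_t) ck_size);
    if (chunk == NULL)
        return -1;

    if (read_exact(f, chunk, (size_t) ck_size) != 0 ||
        memcmp(chunk, "SND ", 4) != 0) {  /* safe: at least 4 bytes present */
        free(chunk);
        return -1;
    }

    /* ... parse the remaining ck_size - 4 bytes here ... */
    free(chunk);
    return 0;
}

int main(void)
{
    /* Both calls are rejected by the bounds check before any read happens. */
    printf("%d\n", parse_prop_chunk(stdin, 2));       /* -1: too small */
    printf("%d\n", parse_prop_chunk(stdin, 100000));  /* -1: too large */
    return 0;
}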
name_parse
name_parse(u8 *packet, int length, int *idx, char *name_out, int name_out_len) { int name_end = -1; int j = *idx; int ptr_count = 0; #define GET32(x) do { if (j + 4 > length) goto err; memcpy(&t32_, packet + j, 4); j += 4; x = ntohl(t32_); } while (0) #define GET16(x) do { if (j + 2 > length) goto err; memcpy(&t_, packet + j, 2); j += 2; x = ntohs(t_); } while (0) #define GET8(x) do { if (j >= length) goto err; x = packet[j++]; } while (0) char *cp = name_out; const char *const end = name_out + name_out_len; /* Normally, names are a series of length prefixed strings terminated */ /* with a length of 0 (the lengths are u8's < 63). */ /* However, the length can start with a pair of 1 bits and that */ /* means that the next 14 bits are a pointer within the current */ /* packet. */ for (;;) { u8 label_len; if (j >= length) return -1; GET8(label_len); if (!label_len) break; if (label_len & 0xc0) { u8 ptr_low; GET8(ptr_low); if (name_end < 0) name_end = j; j = (((int)label_len & 0x3f) << 8) + ptr_low; /* Make sure that the target offset is in-bounds. */ if (j < 0 || j >= length) return -1; /* If we've jumped more times than there are characters in the * message, we must have a loop. */ if (++ptr_count > length) return -1; continue; } if (label_len > 63) return -1; if (cp != name_out) { if (cp + 1 >= end) return -1; *cp++ = '.'; } if (cp + label_len >= end) return -1; memcpy(cp, packet + j, label_len); cp += label_len; j += label_len; } if (cp >= end) return -1; *cp = '\0'; if (name_end < 0) *idx = j; else *idx = name_end; return 0; err: return -1; }
name_parse(u8 *packet, int length, int *idx, char *name_out, int name_out_len) { int name_end = -1; int j = *idx; int ptr_count = 0; #define GET32(x) do { if (j + 4 > length) goto err; memcpy(&t32_, packet + j, 4); j += 4; x = ntohl(t32_); } while (0) #define GET16(x) do { if (j + 2 > length) goto err; memcpy(&t_, packet + j, 2); j += 2; x = ntohs(t_); } while (0) #define GET8(x) do { if (j >= length) goto err; x = packet[j++]; } while (0) char *cp = name_out; const char *const end = name_out + name_out_len; /* Normally, names are a series of length prefixed strings terminated */ /* with a length of 0 (the lengths are u8's < 63). */ /* However, the length can start with a pair of 1 bits and that */ /* means that the next 14 bits are a pointer within the current */ /* packet. */ for (;;) { u8 label_len; GET8(label_len); if (!label_len) break; if (label_len & 0xc0) { u8 ptr_low; GET8(ptr_low); if (name_end < 0) name_end = j; j = (((int)label_len & 0x3f) << 8) + ptr_low; /* Make sure that the target offset is in-bounds. */ if (j < 0 || j >= length) return -1; /* If we've jumped more times than there are characters in the * message, we must have a loop. */ if (++ptr_count > length) return -1; continue; } if (label_len > 63) return -1; if (cp != name_out) { if (cp + 1 >= end) return -1; *cp++ = '.'; } if (cp + label_len >= end) return -1; if (j + label_len > length) return -1; memcpy(cp, packet + j, label_len); cp += label_len; j += label_len; } if (cp >= end) return -1; *cp = '\0'; if (name_end < 0) *idx = j; else *idx = name_end; return 0; err: return -1; }
{ "deleted": [ { "line_no": 20, "char_start": 830, "char_end": 860, "line": "\t\tif (j >= length) return -1;\n" } ], "added": [ { "line_no": 40, "char_start": 1425, "char_end": 1466, "line": "\t\tif (j + label_len > length) return -1;\n" } ] }
{ "deleted": [ { "char_start": 832, "char_end": 862, "chars": "if (j >= length) return -1;\n\t\t" } ], "added": [ { "char_start": 1412, "char_end": 1453, "chars": ") return -1;\n\t\tif (j + label_len > length" } ] }
github.com/libevent/libevent/commit/96f64a022014a208105ead6c8a7066018449d86d
evdns.c
cwe-125
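The single added line here, if (j + label_len > length) return -1;, rejects a DNS label whose length byte promises more data than the packet actually contains, before the memcpy runs; the older if (j >= length) test that it displaces only guaranteed that the length byte itself was in bounds, which GET8() already checks. The same idea in isolation, with hypothetical parameter names rather than evdns's locals:

/* Sketch only: validate a length byte against the bytes actually left in
 * the packet before copying. copy_label(), pkt, pkt_len and out are
 * hypothetical; the bounds check mirrors the one added in evdns.c. */
#include <stdio.h>
#include <string.h>

static int copy_label(const unsigned char *pkt, size_t pkt_len, size_t off,
                      char *out, size_t out_len)
{
    size_t label_len;

    if (off >= pkt_len)
        return -1;                  /* need the length byte itself */
    label_len = pkt[off++];

    if (label_len > 63)
        return -1;                  /* DNS labels are at most 63 bytes */
    if (off + label_len > pkt_len)
        return -1;                  /* the added check: label data in bounds */
    if (label_len + 1 > out_len)
        return -1;                  /* room for the bytes plus a NUL */

    memcpy(out, pkt + off, label_len);
    out[label_len] = '\0';
    return (int) label_len;
}

int main(void)
{
    const unsigned char pkt[] = { 3, 'w', 'w', 'w' };
    char out[64];

    if (copy_label(pkt, sizeof pkt, 0, out, sizeof out) > 0)
        printf("%s\n", out);        /* prints "www" */
    return 0;
}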